diff --git a/cosipy/__init__.py b/cosipy/__init__.py index 7b1abb99..61184ff4 100644 --- a/cosipy/__init__.py +++ b/cosipy/__init__.py @@ -2,15 +2,16 @@ from .response import DetectorResponse +from .spacecraftfile import * + from .data_io import DataIO from .data_io import UnBinnedData from .data_io import BinnedData from .data_io import ReadTraTest -from .threeml import COSILike from .threeml import Band_Eflux -from .spacecraftfile import SpacecraftFile +from .spacecraftfile import SpacecraftHistory from .ts_map import FastTSMap, MOCTSMap diff --git a/cosipy/background_estimation/ContinuumEstimation.py b/cosipy/background_estimation/ContinuumEstimation.py index 79f1eef8..ea90600c 100644 --- a/cosipy/background_estimation/ContinuumEstimation.py +++ b/cosipy/background_estimation/ContinuumEstimation.py @@ -23,7 +23,7 @@ def calc_psr(self, sc_orientation, detector_response, coord, nside=16): ---------- ori_file : str Full path to orienation file. - sc_orientation : cosipy.spacecraftfile.SpacecraftFile + sc_orientation : cosipy.spacecraftfile.SpacecraftHistory Spacecraft orientation object. detector_response : str Full path to detector response file. 
diff --git a/cosipy/background_estimation/__init__.py b/cosipy/background_estimation/__init__.py index 22ee8ac4..ee2ba681 100644 --- a/cosipy/background_estimation/__init__.py +++ b/cosipy/background_estimation/__init__.py @@ -1,2 +1,3 @@ from .LineBackgroundEstimation import LineBackgroundEstimation from .ContinuumEstimation import ContinuumEstimation +from .free_norm_threeml_binned_bkg import * diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py new file mode 100644 index 00000000..f4e11dc1 --- /dev/null +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -0,0 +1,307 @@ +import itertools +from typing import Dict, Tuple, Union, Any, Type, Optional, Iterable + +import numpy as np +from astromodels import Parameter +from astropy.coordinates import SkyCoord, CartesianRepresentation, UnitSphericalRepresentation +from astropy.time import Time +from histpy import Histogram +from histpy import Axes + +from astropy import units as u +from scoords import SpacecraftFrame + +from cosipy import SpacecraftHistory +from cosipy.data_io.EmCDSUnbinnedData import TimeTagEmCDSEventInSCFrame +from cosipy.interfaces import BinnedBackgroundInterface, BinnedDataInterface, DataInterface, BackgroundDensityInterface, \ + BackgroundInterface, EventInterface + +__all__ = ["FreeNormBinnedBackground"] + +from cosipy.interfaces.data_interface import TimeTagEmCDSEventDataInSCFrameInterface + +from cosipy.interfaces.event import TimeTagEmCDSEventInSCFrameInterface +from cosipy.util.iterables import itertools_batched + +class FreeNormBackground(BackgroundInterface): + """ + This must translate to/from regular parameters + with arbitrary type from/to 3ML parameters + + Default to "bkg_norm" is there was a single unlabeled component + """ + + _default_label = 'bkg_norm' + + def __init__(self, + distribution:Union[Histogram, Dict[str, Histogram]], + sc_history:SpacecraftHistory, + copy = True): + 
""" + + Parameters + ---------- + distribution + sc_history + copy: copy hist distribution + """ + + if isinstance(distribution, Histogram): + # Single component + self._distributions = {self._default_label: distribution} + self._norms = np.ones(1) # Hz. Each component + self._norm = 1 # Hz. Total + self._single_component = True + else: + # Multiple label components. + self._distributions = distribution + self._norms = np.ones(self.ncomponents) # Hz Each component + self._norm = np.sum(self._norms) # Hz. Total + self._single_component = False + + self._labels = tuple(self._distributions.keys()) + + # Normalize + # Unit: second + self._livetime = sc_history.cumulative_livetime().to_value(u.s) + for label,dist in self._distributions.items(): + dist_norm = np.sum(dist) + if copy: + self._distributions[label] = dist/dist_norm + else: + dist /= dist_norm + + # These will be densify anyway since _expectation is dense + # And histpy doesn't yet handle this operation efficiently + # See Histogram._inplace_operation_handle_sparse() + # Do it once and for all + for label, bkg in self._distributions.items(): + if bkg.is_sparse: + self._distributions[label] = bkg.to_dense() + + if self.ncomponents == 0: + raise ValueError("You need to input at least one components") + + self._axes = None + for bkg in self._distributions.values(): + if self._axes is None: + self._axes = bkg.axes + else: + if self._axes != bkg.axes: + raise ValueError("All background components mus have the same axes") + + @property + def norm(self): + """ + Sum of all rates + """ + + return u.Quantity(self._norm, u.Hz) + + @property + def norms(self): + if self._single_component: + return {self._default_label: u.Quantity(self._norms[0], u.Hz)} + else: + return {l:u.Quantity(n, u.Hz, copy = False) for l,n in zip(self.labels,self._norms)} + + @property + def ncomponents(self): + return len(self._distributions) + + @property + def meausured_axes(self): + return self._axes + + @property + def labels(self): + return 
self._labels + + def set_norm(self, norm: Union[u.Quantity, Dict[str, u.Quantity]]): + + if self._single_component: + if isinstance(norm, dict): + self._norms[0] = norm[self._default_label].to_value(u.Hz) + else: + self._norms[0] = norm.to_value(u.Hz) + else: + # Multiple + if not isinstance(norm, dict): + raise TypeError("This a multi-component background. Provide labeled norm values in a dictionary") + + for label,norm_i in norm.items(): + if label not in self.labels: + raise ValueError(f"Norm {label} not in {self.labels}") + + self._norms[self.labels.index(label)] = norm_i.to_value(u.Hz) + + self._norm = sum(n for n in self._norms) + + def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: + """ + Same keys as background components + """ + + self.set_norm(parameters) + + @property + def parameters(self) -> Dict[str, u.Quantity]: + return self.norms + +class FreeNormBinnedBackground(FreeNormBackground, BinnedBackgroundInterface): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # Cache + self._expectation = None + self._last_norm_values = None + + def expectation(self, axes:Axes, copy:bool = True)->Histogram: + """ + + Parameters + ---------- + axes + copy: + If True, it will return an array that the user if free to modify. + Otherwise, it will result a reference, possible to the cache, that + the user should not modify + + Returns + ------- + + """ + + if axes != self.meausured_axes: + raise ValueError("Requested axes do not match the background component axes") + + # Check if we can use the cache + if self._expectation is None: + # First call. Initialize + self._expectation = Histogram(self.meausured_axes) + + elif self.norms == self._last_norm_values: + # No changes. Use cache + if copy: + return self._expectation.copy() + else: + return self._expectation + + else: + # First call or norms have change. 
Recalculate + self._expectation.clear() + + # Compute expectation + for label,bkg in self._distributions.items(): + norm = self._norms[self.labels.index(label)] + self._expectation += bkg * norm * self._livetime + + # Cache. Regular copy is enough since norm values are float en not mutable + self._last_norm_values = self.norms.copy() + + if copy: + return self._expectation.copy() + else: + return self._expectation + + +class FreeNormBackgroundInterpolatedDensityTimeTagEmCDS(FreeNormBackground, BackgroundDensityInterface): + + @property + def event_type(self) -> Type[EventInterface]: + return TimeTagEmCDSEventInSCFrameInterface + + def __init__(self, + data:TimeTagEmCDSEventDataInSCFrameInterface, + distribution:Union[Histogram, Dict[str, Histogram]], + sc_history:SpacecraftHistory, + copy=True, + batch_size = 100000, + *args, + **kwargs): + + super().__init__(distribution, sc_history, + copy=copy, *args, **kwargs) + + # We need the density per phase space for the specific measurement units TimeTagEmCDSEventInSCFrameInterface + # Energy: keV + # Phi: rad + # PsiChi: sr (for the phase space. 
The axis is a HealpixAxis) + # Time: seconds (taken into account by the norm (a rate) unit) + + psichi_frame = None + + for label,dist in self._distributions.items(): + + dist = self._distributions[label] = dist.project('Em', 'Phi', 'PsiChi') + + dist.axes['Em'] = dist.axes['Em'].to(u.keV).to(None, copy=False, update=False) + dist.axes['Phi'] = dist.axes['Phi'].to(u.rad).to(None, copy=False, update=False) + + energy_phase_space = dist.axes['Em'].widths + phi_phase_space = dist.axes['Phi'].widths + psichi_phase_space = dist.axes['PsiChi'].pixarea().to_value(u.sr) + + if psichi_frame is None: + psichi_frame = dist.axes['PsiChi'].coordsys + else: + if psichi_frame != dist.axes['PsiChi'].coordsys: + raise ValueError("All PsiChi axes must be in the same frame") + + dist /= dist.axes.expand_dims(energy_phase_space, 'Em') + dist /= dist.axes.expand_dims(phi_phase_space, 'Phi') + dist /= psichi_phase_space + + # Compute the probabilities once and for all + # TODO: account for livetime + self._prob = [[] for _ in range(self.ncomponents)] + + for events_chunk in itertools_batched(data, batch_size): + + jd1, jd2, energy,phi, psichi_lon, psichi_lat = np.asarray([[ + event.jd1, + event.jd2, + event.energy_keV, + event.scattering_angle_rad, + event.scattered_lon_rad_sc, + event.scattered_lat_rad_sc] + for event in events_chunk], dtype=float).transpose() + + times = Time(jd1, jd2, format = 'jd') + + # Transform local to inertial + sc_psichi_coord = SkyCoord(psichi_lon, psichi_lat, unit=u.rad, frame=SpacecraftFrame()) + sc_psichi_vec = sc_psichi_coord.cartesian.xyz.value + attitudes = sc_history.interp_attitude(times).transform_to(psichi_frame) + inertial_psichi_vec = attitudes.rot.apply(sc_psichi_vec.transpose()) + inertial_psichi_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*inertial_psichi_vec.transpose())) + inertial_psichi_coord = SkyCoord(inertial_psichi_sph, frame = psichi_frame) + + for label,dist in self._distributions.items(): + prob = 
dist.interp(energy, phi, inertial_psichi_coord) + self._prob[self.labels.index(label)].extend(prob) + + self._prob = np.asarray(self._prob) + + def expected_counts(self) -> float: + """ + Total expected counts + """ + return self._livetime * self._norm + + def expectation_density(self) -> Iterable[float]: + """ + Return the expected number of counts density from the start-th event + to the stop-th event. This equals the event probabiliy times the number of events + """ + + # Multiply each probability by the norm, and then sum + return np.tensordot(self._prob, self._norms, axes = (0,0)) + + + + + + + diff --git a/cosipy/data_io/BinnedData.py b/cosipy/data_io/BinnedData.py index 7a863b87..da958dfd 100644 --- a/cosipy/data_io/BinnedData.py +++ b/cosipy/data_io/BinnedData.py @@ -16,6 +16,11 @@ from cosipy.data_io import UnBinnedData import logging +import astropy.units as u +from astropy.coordinates import SkyCoord + +from cosipy.interfaces import BinnedDataInterface + logger = logging.getLogger(__name__) @@ -496,3 +501,40 @@ def get_raw_lightcurve(self, binned_data=None, output_name=None, show_plots=Fals d = {"Time[UTC]":self.time_bin_centers,"Rate[ct/s]":self.time_hist/self.time_bin_widths} df = pd.DataFrame(data=d) df.to_csv(f"{output_name}.dat",index=False,sep="\t",columns=["Time[UTC]","Rate[ct/s]"]) + + def get_em_cds(self): + return EmCDSBinnedData(self.binned_data.project('Em', 'Phi', 'PsiChi')) + +class EmCDSBinnedData(BinnedDataInterface): + """ + Measured energy (Em), Compton polar scattering angle (Phi), and the scattering direction (PsiChi). + Phi and PsiChi are the Compton Data Space (CDS). No time dependence + """ + def __init__(self, data:Histogram): + + # Checks + if set(data.axes.labels) != {'Em', 'Phi', 'PsiChi'}: + raise ValueError(f"Wrong axes. 
'Em', 'Psi', 'PsiChi' expected.") + + if not data.axes['Em'].unit.is_equivalent(u.keV): + raise ValueError(f"Em axis should have units of energy") + + if not data.axes['Phi'].unit.is_equivalent(u.deg): + raise ValueError(f"Psi axis should have angle units") + + if not isinstance(data.axes['PsiChi'],HealpixAxis): + raise ValueError(f"PsiChi must be of type {HealpixAxis}.") + + if data.axes['PsiChi'].coordsys is None: + raise ValueError(f"PsiChi axes must have a coordinate system.") + + self._data = data + + @property + def data(self) -> Histogram: + return self._data + @property + def axes(self) -> Axes: + return self._data.axes + + diff --git a/cosipy/data_io/EmCDSUnbinnedData.py b/cosipy/data_io/EmCDSUnbinnedData.py new file mode 100644 index 00000000..bc59a09f --- /dev/null +++ b/cosipy/data_io/EmCDSUnbinnedData.py @@ -0,0 +1,262 @@ +from pathlib import Path +from typing import Iterable, Iterator, Optional, Tuple, Union, List + +import numpy as np +from astropy.coordinates import BaseCoordinateFrame, Angle, SkyCoord, UnitSphericalRepresentation +from astropy.time import Time +from astropy.units import Quantity +from numpy._typing import ArrayLike +from scoords import SpacecraftFrame + +from cosipy import UnBinnedData +from cosipy.interfaces import EventWithEnergyInterface, EventDataInterface, EventDataWithEnergyInterface +from cosipy.interfaces.data_interface import TimeTagEmCDSEventDataInSCFrameInterface +from cosipy.interfaces.event import TimeTagEmCDSEventInSCFrameInterface, \ + EmCDSEventInSCFrameInterface + +import astropy.units as u + +from cosipy.interfaces.event_selection import EventSelectorInterface + +class EmCDSEventInSCFrame(EmCDSEventInSCFrameInterface): + + _frame = SpacecraftFrame() + + def __init__(self, energy, scatt_angle, scatt_lon, scatt_lat, event_id = None): + """ + Parameters + ---------- + jd1: julian days + jd2: julian days + energy: keV + scatt_angle: scattering angle radians + scatt_lon: scattering longitude radians + scatt_lat: 
scattering latitude radians + """ + self._id = event_id + self._energy = energy + self._scatt_angle = scatt_angle + self._scatt_lat = scatt_lat + self._scatt_lon = scatt_lon + + @property + def id(self) -> int: + return self._id + + @property + def frame(self): + return self._frame + + @property + def energy_keV(self) -> float: + return self._energy + + @property + def scattering_angle_rad(self) -> float: + return self._scatt_angle + + @property + def scattered_lon_rad_sc(self) -> float: + return self._scatt_lon + + @property + def scattered_lat_rad_sc(self) -> float: + return self._scatt_lat + +class TimeTagEmCDSEventInSCFrame(EmCDSEventInSCFrame, TimeTagEmCDSEventInSCFrameInterface): + + def __init__(self, jd1, jd2, energy, scatt_angle, scatt_lon, scatt_lat, event_id=None): + """ + Parameters + ---------- + jd1: julian days + jd2: julian days + energy: keV + scatt_angle: scattering angle radians + scatt_lon: scattering longitude radians + scatt_lat: scattering latitude radians + """ + super().__init__(energy, scatt_angle, scatt_lon, scatt_lat, event_id) + + self._jd1 = jd1 + self._jd2 = jd2 + + @property + def jd1(self): + return self._jd1 + + @property + def jd2(self): + return self._jd2 + + + +class TimeTagEmCDSEventDataInSCFrameFromArrays(TimeTagEmCDSEventDataInSCFrameInterface): + """ + + """ + + _frame = SpacecraftFrame() + event_type = TimeTagEmCDSEventInSCFrame + + def __init__(self, + time:Time, + energy:Quantity, + scattering_angle:Angle, + scattered_direction:SkyCoord, + event_id:Optional[Iterable[int]] = None, + selection:EventSelectorInterface = None): + """ + + Parameters + ---------- + time + energy + scattering_angle + scattered_direction + event_id + selection + """ + + self._jd1 = time.jd1 + self._jd2 = time.jd2 + self._energy = energy.to_value(u.keV) + self._scatt_angle = scattering_angle.to_value(u.rad) + + if not isinstance(scattered_direction.frame, SpacecraftFrame): + raise ValueError("Coordinates need to be in SC frame") + + 
scattered_direction = scattered_direction.represent_as(UnitSphericalRepresentation) + + self._scatt_lat = scattered_direction.lat.rad + self._scatt_lon = scattered_direction.lon.rad + if event_id is None: + self._id = np.arange(self._jd1.size) + else: + self._id = np.asarray(event_id) + + # Check size + self._id, self._jd1, self._jd2, self._energy, self._scatt_angle, self._scatt_lat, self._scatt_lon = np.broadcast_arrays(self._id, self._jd1, self._jd2, self._energy, self._scatt_angle, self._scatt_lat, self._scatt_lon) + + self._nevents = self._id.size + + if selection is not None: + # Apply selection once and for all + new_id = [] + new_jd1 = [] + new_jd2 = [] + new_energy = [] + new_scatt_angle = [] + new_scatt_lat = [] + new_scatt_lon = [] + + nevents = 0 + for event in selection(self): + new_id.append(event.id) + new_jd1.append(event.jd1) + new_jd2.append(event.jd2) + new_energy.append(event.energy_keV) + new_scatt_angle.append(event.scattering_angle_rad) + new_scatt_lat.append(event.scattered_lat_rad_sc) + new_scatt_lon.append(event.scattered_lon_rad_sc) + nevents += 1 + + self._nevents = nevents + + self._id = np.asarray(new_id) + self._jd1 = np.asarray(new_jd1) + self._jd2 = np.asarray(new_jd2) + self._energy = np.asarray(new_energy) + self._scatt_angle = np.asarray(new_scatt_angle) + self._scatt_lat = np.asarray(new_scatt_lat) + self._scatt_lon = np.asarray(new_scatt_lon) + + def __getitem__(self, i: int) -> TimeTagEmCDSEventInSCFrameInterface: + return TimeTagEmCDSEventInSCFrame(self._jd1[i], self._jd2[i], self._energy[i], self._scatt_angle[i], self._scatt_lon[i], self._scatt_lat[i], + self._id[i]) + + @property + def nevents(self) -> int: + return self._nevents + + def __iter__(self) -> Iterator[TimeTagEmCDSEventInSCFrameInterface]: + for id, jd1, jd2, energy, scatt_angle, scatt_lat, scatt_lon in zip(self._id, self._jd1, self._jd2, self._energy, self._scatt_angle, self._scatt_lat, self._scatt_lon): + yield TimeTagEmCDSEventInSCFrame(jd1, jd2, energy, 
scatt_angle, scatt_lon, scatt_lat, id) + + @property + def frame(self) -> SpacecraftFrame: + return self._frame + + @property + def ids(self) -> Iterable[int]: + return self._id + + @property + def jd1(self) -> Iterable[float]: + return self._jd1 + + @property + def jd2(self) -> Iterable[float]: + return self._jd2 + + @property + def energy_keV(self) -> Iterable[float]: + return self._energy + + @property + def scattering_angle_rad(self) -> Iterable[float]: + return self._scatt_angle + + @property + def scattered_lon_rad_sc(self) -> Iterable[float]: + return self._scatt_lon + + @property + def scattered_lat_rad_sc(self) -> Iterable[float]: + return self._scatt_lat + +class TimeTagEmCDSEventDataInSCFrameFromDC3Fits(TimeTagEmCDSEventDataInSCFrameFromArrays): + + def __init__(self, data_path: Union[Path, List[Path]], + selection:EventSelectorInterface = None): + + time = np.empty(0) + energy = np.empty(0) + phi = np.empty(0) + psi = np.empty(0) + chi = np.empty(0) + + if isinstance(data_path, (str, Path)): + data_path = [Path(data_path)] + + for file in data_path: + # get_dict_from_fits is really a static method, no config file needed + data_dict = UnBinnedData.get_dict_from_fits(None, str(file)) + + time = np.append(time, data_dict['TimeTags']) + energy = np.append(energy, data_dict['Energies']) + phi = np.append(phi, data_dict['Phi']) + psi = np.append(psi, data_dict['Psi local']) + chi = np.append(chi, data_dict['Chi local']) + + # Time sort + tsort = np.argsort(time) + + time = time[tsort] + energy = energy[tsort] + phi = phi[tsort] + psi = psi[tsort] + chi = chi[tsort] + + time = Time(time, format='unix') + energy = u.Quantity(energy, u.keV) + phi = Angle(phi, u.rad) + # Psi is colatitude (latitude complementary angle) + psichi = SkyCoord(chi, np.pi / 2 - psi, unit=u.rad, + frame=SpacecraftFrame()) + + super().__init__(time, energy, phi, psichi, selection = selection) + + + + diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index 
e3a6190e..a1cbb0cf 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -19,8 +19,8 @@ from scoords import Attitude, SpacecraftFrame import cosipy +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.data_io import DataIO -from cosipy.spacecraftfile import SpacecraftFile import logging logger = logging.getLogger(__name__) @@ -472,10 +472,10 @@ def instrument_pointing(self, ori_file): """ # Get ori info: - ori = SpacecraftFile.parse_from_file(ori_file) - time_tags = ori.get_time().to_value(format="unix") - x_pointings = ori.x_pointings - z_pointings = ori.z_pointings + ori = SpacecraftHistory.open(self.ori_file) + time_tags = ori.obstime.to_value(format="unix") + + x_pointings, _, z_pointings = ori.attitude.as_axes() # Interpolate: self.xl_interp = interpolate.interp1d(time_tags, x_pointings.l.rad, kind='linear') @@ -905,10 +905,10 @@ def cut_SAA_events(self, unbinned_data=None, output_name=None): self.cosi_dataset = self.get_dict(unbinned_data) # Get orientation info - ori = SpacecraftFile.parse_from_file(self.ori_file) + ori = SpacecraftHistory.open(self.ori_file) # Get bad time intervals - bti = self.find_bad_intervals(ori._time, ori.livetime) + bti = self.find_bad_intervals(ori.obstime, ori.livetime) # Get indices for good photons time_keep_index = self.filter_good_data(self.cosi_dataset['TimeTags'], bti) diff --git a/cosipy/data_io/__init__.py b/cosipy/data_io/__init__.py index 9e6394fe..8401a612 100644 --- a/cosipy/data_io/__init__.py +++ b/cosipy/data_io/__init__.py @@ -1,4 +1,4 @@ from .DataIO import DataIO from .UnBinnedData import UnBinnedData -from .BinnedData import BinnedData +from .BinnedData import BinnedData, EmCDSBinnedData from .ReadTraTest import ReadTraTest diff --git a/cosipy/event_selection/time_selection.py b/cosipy/event_selection/time_selection.py new file mode 100644 index 00000000..897bfc8a --- /dev/null +++ b/cosipy/event_selection/time_selection.py @@ -0,0 +1,141 @@ +import logging +logger = 
logging.getLogger(__name__) + +import itertools +from typing import Union, Iterable + +import numpy as np +from astropy.time import Time + +from cosipy.interfaces import TimeTagEventInterface, EventInterface +from cosipy.interfaces.event_selection import EventSelectorInterface +from cosipy.util.iterables import itertools_batched + + +class TimeSelector(EventSelectorInterface): + + def __init__(self, tstart:Time = None, tstop:Time = None, batch_size:int = 10000): + """ + Assumes events are time-ordered + + Selects events that fall within ANY of the time intervals defined by + corresponding pairs of (tstart, tstop). + + Valid combinations: + - (None, None): No time constraints + - (Scalar, None): Single lower bound only + - (None, Scalar): Single upper bound only + - (Scalar, Scalar): Single time interval + - (List, List): Multiple time intervals (same length required) + + Parameters + ---------- + tstart: Time, scalar Time, or None + Start time(s). If list, tstop must also be a list of same length. + tstop: Time, scalar Time, or None + Stop time(s). If list, tstart must also be a list of same length. + batch_size: int, default 10000 + Number of events to process at once + """ + if tstart is not None and tstop is not None: + if not tstart.isscalar == tstop.isscalar: + logger.error("tstart and tstop must both be scalar or both be list.") + raise ValueError + + elif tstart is None and tstop is not None: + if tstop.isscalar == False: + logger.error("When tstart is None, tstop must not be a list.") + raise ValueError + + elif tstart is not None and tstop is None: + if tstart.isscalar == False: + logger.error("When tstop is None, tstart must not be a list.") + raise ValueError + + # tstart is None and tstop is None -> OK. 
+ + # Convert scalars to lists for uniform processing + if tstart is not None and tstart.isscalar == True: + tstart = Time([tstart]) + + if tstop is not None and tstop.isscalar == True: + tstop = Time([tstop]) + + # length check + if tstart is not None and tstop is not None: + if len(tstart) != len(tstop): + logger.error(f"tstart and tstop must have same length.") + raise ValueError + + self._tstart_list = tstart + self._tstop_list = tstop + + self._batch_size = batch_size + + @classmethod + def from_gti(cls, gti, batch_size:int = 10000): + """ + Instantiate a multi time selector from good time intervals. + + Parameters + ---------- + gti: + Good time intervals object with tstart_list and tstop_list attributes + batch_size: int + Number of events to process at once + """ + tstart_list = gti.tstart_list + tstop_list = gti.tstop_list + + selector = cls(tstart_list, tstop_list, batch_size) + + return selector + + def _select(self, event:TimeTagEventInterface) -> bool: + # Single event + return next(iter(self.select([event]))) + + def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterface]]) -> Union[bool, Iterable[bool]]: + + if isinstance(events, EventInterface): + # Single event + return self._select(events) + else: + # Multiple + + # Working in chunks/batches. 
+ # This can optimized based on the system + + for chunk in itertools_batched(events, self._batch_size): + + jd1 = [] + jd2 = [] + + for event in chunk: + jd1.append(event.jd1) + jd2.append(event.jd2) + + time = Time(jd1, jd2, format = 'jd') + + if self._tstart_list is None and self._tstop_list is None: + result = np.ones(len(time), dtype=bool) + + elif self._tstart_list is None: + result = time <= self._tstop_list[0] + + elif self._tstop_list is None: + result = time > self._tstart_list[0] + + else: + indices = np.searchsorted(self._tstart_list, time, side='right') - 1 + valid = (indices >= 0) & (indices < len(self._tstop_list)) + result = np.zeros(len(time), dtype=bool) + result[valid] = time[valid] <= self._tstop_list[indices[valid]] + + for sel in result: + yield sel + + if self._tstop_list is not None and len(time) > 0: + if time[-1] > self._tstop_list[-1]: + # Stop further loading of event + return diff --git a/cosipy/image_deconvolution/scatt_exposure_table.py b/cosipy/image_deconvolution/scatt_exposure_table.py index 5a7caa5f..0869cd69 100644 --- a/cosipy/image_deconvolution/scatt_exposure_table.py +++ b/cosipy/image_deconvolution/scatt_exposure_table.py @@ -1,4 +1,7 @@ import logging + +from setuptools.command.easy_install import easy_install + logger = logging.getLogger(__name__) from tqdm.autonotebook import tqdm @@ -135,7 +138,7 @@ def analyze_orientation(cls, orientation, nside, scheme = 'ring', start = None, altitude_list = [] if start is not None and stop is not None: - orientation = orientation.source_interval(start, stop) + orientation = orientation.select_interval(start, stop) elif start is not None: logger.error("please specify the stop time") return @@ -143,24 +146,28 @@ def analyze_orientation(cls, orientation, nside, scheme = 'ring', start = None, logger.error("please specify the start time") return - ori_time = orientation.get_time() + ori_time = orientation.obstime logger.info(f'duration: {(ori_time[-1] - ori_time[0]).to("day")}') - attitude 
= orientation.get_attitude()[:-1] + attitude = orientation.attitude[:-1] pointing_list = attitude.transform_to("galactic").as_axes() n_pointing = len(pointing_list[0]) - - l_x = orientation.x_pointings.l.value[:-1] - b_x = orientation.x_pointings.b.value[:-1] - l_z = orientation.z_pointings.l.value[:-1] - b_z = orientation.z_pointings.b.value[:-1] + x_pointings, _, z_pointings = orientation.attitude.as_axes() + + l_x = x_pointings.l.value[:-1] + b_x = x_pointings.b.value[:-1] + + l_z = z_pointings.l.value[:-1] + b_z = z_pointings.b.value[:-1] + + earth_zenith_coord = orientation.earth_zenith.transform_to('galactic') - earth_zenith_l = orientation.earth_zenith.l.value[:-1] - earth_zenith_b = orientation.earth_zenith.b.value[:-1] + earth_zenith_l = earth_zenith_coord.l.value[:-1] + earth_zenith_b = earth_zenith_coord.b.value[:-1] if scheme == 'ring': nest = False @@ -173,8 +180,8 @@ def analyze_orientation(cls, orientation, nside, scheme = 'ring', start = None, idx_x = hp.ang2pix(nside, l_x, b_x, nest=nest, lonlat=True) idx_z = hp.ang2pix(nside, l_z, b_z, nest=nest, lonlat=True) - livetime = orientation.livetime - altitude = orientation.get_altitude()[:-1] + livetime = orientation.livetime.to_value(u.s) + altitude = orientation.location.spherical.distance[:-1].to_value(u.km) for i in tqdm(range(n_pointing)): diff --git a/cosipy/image_deconvolution/time_binned_exposure_table.py b/cosipy/image_deconvolution/time_binned_exposure_table.py index 60a3086c..f54dc075 100644 --- a/cosipy/image_deconvolution/time_binned_exposure_table.py +++ b/cosipy/image_deconvolution/time_binned_exposure_table.py @@ -1,4 +1,7 @@ import logging + +from astropy.time import Time + logger = logging.getLogger(__name__) from tqdm.autonotebook import tqdm @@ -97,27 +100,31 @@ def from_orientation(cls, orientation, tstart_list, tstop_list, **kwargs): for time_binning_index, (tstart, tstop) in enumerate(zip(tstart_list, tstop_list)): - this_orientation = orientation.source_interval(tstart, tstop) + 
this_orientation = orientation.select_interval(tstart, tstop) time_binning_indices.append(time_binning_index) - attitude = this_orientation.get_attitude()[:-1] + attitude = this_orientation.attitude[:-1] pointing_list = attitude.transform_to("galactic").as_axes() n_pointing = len(pointing_list[0]) - - l_x = this_orientation.x_pointings.l.value[:-1] - b_x = this_orientation.x_pointings.b.value[:-1] - l_z = this_orientation.z_pointings.l.value[:-1] - b_z = this_orientation.z_pointings.b.value[:-1] + x_pointings, _, z_pointings = this_orientation.attitude.as_axes() + + l_x = x_pointings.l.value[:-1] + b_x = x_pointings.b.value[:-1] + + l_z = z_pointings.l.value[:-1] + b_z = z_pointings.b.value[:-1] + + earth_zenith_coord = this_orientation.earth_zenith.transform_to('galactic') - earth_zenith_l = this_orientation.earth_zenith.l.value[:-1] - earth_zenith_b = this_orientation.earth_zenith.b.value[:-1] + earth_zenith_l = earth_zenith_coord.l.value[:-1] + earth_zenith_b = earth_zenith_coord.b.value[:-1] - livetime = this_orientation.livetime - altitude = this_orientation.get_altitude()[:-1] + livetime = this_orientation.livetime.to_value(u.s) + altitude = this_orientation.location.spherical.distance[:-1].to_value(u.km) # appending the value livetimes.append(livetime) diff --git a/cosipy/interfaces/__init__.py b/cosipy/interfaces/__init__.py new file mode 100644 index 00000000..723a3e78 --- /dev/null +++ b/cosipy/interfaces/__init__.py @@ -0,0 +1,9 @@ +from .event import * +from .data_interface import * +from .background_interface import * +from .likelihood_interface import * +from .expectation_interface import * +from .source_response_interface import * +from .threeml_plugin_interface import * +from .instrument_response_interface import * + diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py new file mode 100644 index 00000000..434163d0 --- /dev/null +++ b/cosipy/interfaces/background_interface.py @@ -0,0 +1,37 @@ +from typing 
import Protocol, runtime_checkable, Dict, Any, Union +import histpy +import numpy as np + +import logging + +import astropy.units as u + +from astromodels import Parameter + +logger = logging.getLogger(__name__) + +from .expectation_interface import BinnedExpectationInterface, ExpectationDensityInterface, ExpectationInterface + +__all__ = [ + "BackgroundInterface", + "BinnedBackgroundInterface", + "BackgroundDensityInterface", + ] + +@runtime_checkable +class BackgroundInterface(ExpectationInterface, Protocol): + def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None:... + @property + def parameters(self) -> Dict[str, u.Quantity]:... + +@runtime_checkable +class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface, Protocol): + """ + No new methods, just the inherited one + """ + +@runtime_checkable +class BackgroundDensityInterface(BackgroundInterface, ExpectationDensityInterface, Protocol): + """ + No new methods, just the inherited one + """ diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py new file mode 100644 index 00000000..4834c056 --- /dev/null +++ b/cosipy/interfaces/data_interface.py @@ -0,0 +1,191 @@ +import itertools +from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union, Sequence, Iterable, ClassVar + +import numpy as np +from astropy.coordinates import BaseCoordinateFrame, Angle, SkyCoord +from astropy.units import Unit, Quantity +import astropy.units as u +from scoords import SpacecraftFrame + +from . 
import EventWithEnergyInterface +from .event import EventInterface, TimeTagEventInterface, \ + ComptonDataSpaceInSCFrameEventInterface, TimeTagEmCDSEventInSCFrameInterface, EventWithScatteringAngleInterface, \ + EmCDSEventInSCFrameInterface +from histpy import Histogram, Axes + +from astropy.time import Time + +# Guard to prevent circular import +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from .event_selection import EventSelectorInterface + +import histpy + +__all__ = ["DataInterface", + "EventDataInterface", + "BinnedDataInterface", + "TimeTagEventDataInterface", + "EventDataWithEnergyInterface" + ] + +@runtime_checkable +class DataInterface(Protocol): + pass + +@runtime_checkable +class BinnedDataInterface(DataInterface, Protocol): + @property + def data(self) -> Histogram:... + @property + def axes(self) -> Axes:... + def fill(self, event_data:Iterable[EventInterface]): + """ + Bin the data. + + Parameters + ---------- + event_data + + Returns + ------- + + """ + +@runtime_checkable +class EventDataInterface(DataInterface, Protocol): + + # Type returned by __iter__ + event_type = ClassVar[Type[EventInterface]] + + def __iter__(self) -> Iterator[EventInterface]: + """ + Return one Event at a time + """ + + def __getitem__(self, item: int) -> EventInterface: + """ + Convenience method. Pretty slow in general. It's suggested that + the implementations override it + """ + return next(itertools.islice(self, item, None)) + + @property + def nevents(self) -> int: + """ + Total number of events yielded by __iter__ + + Convenience method. Pretty slow in general. It's suggested that + the implementations override it + """ + return sum(1 for _ in iter(self)) + + @property + def id(self) -> Iterable[int]: + return [e.id for e in self] + +@runtime_checkable +class TimeTagEventDataInterface(EventDataInterface, Protocol): + + event_type = TimeTagEventInterface + + def __iter__(self) -> Iterator[TimeTagEventInterface]:... 
+ + @property + def jd1(self) -> Iterable[float]: + return [e.jd1 for e in self] + + @property + def jd2(self) -> Iterable[float]: + return [e.jd2 for e in self] + + @property + def time(self) -> Time: + """ + Add fancy time + """ + return Time(self.jd1, self.jd2, format = 'jd') + +@runtime_checkable +class EventDataWithEnergyInterface(EventDataInterface, Protocol): + + event_type = EventWithEnergyInterface + + def __iter__(self) -> Iterator[EventWithEnergyInterface]:... + + @property + def energy_keV(self) -> Iterable[float]: + return [e.energy_keV for e in self] + + @property + def energy(self) -> Quantity: + """ + Add fancy energy quantity + """ + return Quantity(self.energy_keV, u.keV) + + +@runtime_checkable +class EventDataWithScatteringAngleInterface(EventDataInterface, Protocol): + + event_type = EventWithScatteringAngleInterface + + def __iter__(self) -> Iterator[EventWithScatteringAngleInterface]:... + + @property + def scattering_angle_rad(self) -> Iterable[float]: + return [e.scattering_angle_rad for e in self] + + @property + def scattering_angle(self) -> Angle: + """ + Add fancy energy quantity + """ + return Angle(self.scattering_angle_rad, u.rad) + +@runtime_checkable +class ComptonDataSpaceInSCFrameEventDataInterface(EventDataWithScatteringAngleInterface, Protocol): + + event_type = ComptonDataSpaceInSCFrameEventInterface + + def __iter__(self) -> Iterator[ComptonDataSpaceInSCFrameEventInterface]:... 
+ + @property + def scattered_lon_rad_sc(self) -> Iterable[float]: + return [e.scattered_lon_rad_sc for e in self] + + @property + def scattered_lat_rad_sc(self) -> Iterable[float]: + return [e.scattered_lat_rad_sc for e in self] + + @property + def scattered_direction_sc(self) -> SkyCoord: + """ + Add fancy energy quantity + """ + return SkyCoord(self.scattered_lon_rad_sc, + self.scattered_lat_rad_sc, + unit = u.rad, + frame = SpacecraftFrame()) + +@runtime_checkable +class EventDataInSCFrameInterface(EventDataInterface, Protocol): + + @property + def frame(self) -> SpacecraftFrame:... + +@runtime_checkable +class EmCDSEventDataInSCFrameInterface(EventDataWithEnergyInterface, ComptonDataSpaceInSCFrameEventDataInterface, Protocol): + + event_type = EmCDSEventInSCFrameInterface + + def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: ... + +@runtime_checkable +class TimeTagEmCDSEventDataInSCFrameInterface(TimeTagEventDataInterface, + EmCDSEventDataInSCFrameInterface, + Protocol): + + event_type = TimeTagEmCDSEventInSCFrameInterface + + def __iter__(self) -> Iterator[TimeTagEmCDSEventInSCFrameInterface]:... 
diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py new file mode 100644 index 00000000..5a0e1a60 --- /dev/null +++ b/cosipy/interfaces/event.py @@ -0,0 +1,123 @@ +from abc import ABC, abstractmethod +from symtable import Class +from typing import Sequence, Union, Protocol, ClassVar + +import numpy as np +from astropy.coordinates import Angle, SkyCoord, BaseCoordinateFrame +from scoords import SpacecraftFrame +from typing_extensions import runtime_checkable + +from astropy.time import Time +from astropy.units import Quantity, Unit +import astropy.units as u + +__all__ = [ + "EventInterface", + "TimeTagEventInterface", + "EventWithEnergyInterface", +] + +@runtime_checkable +class EventInterface(Protocol): + """ + Derived classes implement all accessors + """ + + # This makes sure that all PDFs have the same units + data_space_units = ClassVar[Union[u.Unit, None]] + + @property + def id(self) -> int: + """ + Typically set by the main data loader or source. + + No necessarily in sequential order + """ + +@runtime_checkable +class TimeTagEventInterface(EventInterface, Protocol): + + data_space_units = u.s + + @property + def jd1(self) -> float:... + + @property + def jd2(self) -> float:... + + @property + def time(self) -> Time: + """ + Add fancy time + """ + return Time(self.jd1, self.jd2, format = 'jd') + +@runtime_checkable +class EventWithEnergyInterface(EventInterface, Protocol): + + data_space_units = u.keV + + @property + def energy_keV(self) -> float:... + + @property + def energy(self) -> Quantity: + """ + Add fancy energy quantity + """ + return Quantity(self.energy_keV, u.keV) + +@runtime_checkable +class EventWithScatteringAngleInterface(EventInterface, Protocol): + + data_space_units = u.rad + + @property + def scattering_angle_rad(self) -> float: ... 
+ + + @property + def scattering_angle(self) -> Angle: + """ + Add fancy energy quantity + """ + return Angle(self.scattering_angle_rad, u.rad) + + +@runtime_checkable +class ComptonDataSpaceInSCFrameEventInterface(EventWithScatteringAngleInterface, Protocol): + + data_space_units = EventWithScatteringAngleInterface.data_space_units * u.sr + + @property + def scattered_lon_rad_sc(self) -> float: ... + + @property + def scattered_lat_rad_sc(self) -> float: ... + + @property + def scattered_direction_sc(self) -> SkyCoord: + """ + Add fancy energy quantity + """ + return SkyCoord(self.scattered_lon_rad_sc, + self.scattered_lat_rad_sc, + unit=u.rad, + frame=SpacecraftFrame()) + +@runtime_checkable +class EmCDSEventInSCFrameInterface(EventWithEnergyInterface, + ComptonDataSpaceInSCFrameEventInterface, + Protocol): + data_space_units = ComptonDataSpaceInSCFrameEventInterface.data_space_units * EventWithEnergyInterface.data_space_units + +@runtime_checkable +class TimeTagEmCDSEventInSCFrameInterface(TimeTagEventInterface, + EmCDSEventInSCFrameInterface, + Protocol): + data_space_units = EmCDSEventInSCFrameInterface.data_space_units * TimeTagEventInterface.data_space_units + + + + + diff --git a/cosipy/interfaces/event_selection.py b/cosipy/interfaces/event_selection.py new file mode 100644 index 00000000..81734e77 --- /dev/null +++ b/cosipy/interfaces/event_selection.py @@ -0,0 +1,38 @@ +import itertools +from typing import Protocol, runtime_checkable, Dict, Any, Iterator, Sequence, Generator, Iterable, Union, Optional, \ + Tuple + +from . import EventInterface + +@runtime_checkable +class EventSelectorInterface(Protocol): + + def select(self, event:Union[EventInterface, Iterable[EventInterface]]) -> Union[bool, Iterable[bool]]: + """ + True to keep an event + + Return a single value for a single Event. 
+ As many values for an Iterable of events + """ + + def mask(self, events: Iterable[EventInterface]) -> Iterable[Tuple[bool,EventInterface]]: + """ + Returns an iterable of tuples. Each tuple has 2 elements: + - First: True to keep an event, False to filter it out. + - Second: the event itself. + """ + events1, events2 = itertools.tee(events, 2) + for selected, event in zip(self.select(events1), events2): + yield selected, event + + def __call__(self, events: Iterable[EventInterface]) -> Union[Iterable[EventInterface], None]: + """ + Skips events that were not selected + + Returning None raises StopIteration + """ + for selected,event in self.mask(events): + if selected: + yield event + + diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py new file mode 100644 index 00000000..8893b200 --- /dev/null +++ b/cosipy/interfaces/expectation_interface.py @@ -0,0 +1,143 @@ +import operator +from typing import Protocol, runtime_checkable, Dict, Any, Generator, Iterable, Optional, Union, Iterator, ClassVar, \ + Type, Tuple + +import histpy +import numpy as np +from histpy import Axes + +from cosipy.interfaces import BinnedDataInterface, EventDataInterface, DataInterface, EventInterface + +__all__ = [ + "ExpectationDensityInterface", + "BinnedExpectationInterface" + ] + + +@runtime_checkable +class ExpectationInterface(Protocol): + pass + +@runtime_checkable +class BinnedExpectationInterface(ExpectationInterface, Protocol): + def expectation(self, axes:Axes, copy: Optional[bool])->histpy.Histogram: + """ + + Parameters + ---------- + axes: + Axes to bin the expectation into + copy: + If True (default), it will return an array that the user if free to modify. 
+ Otherwise, it will result a reference, possible to the cache, that + the user should not modify + + Returns + ------- + + """ + +@runtime_checkable +class ExpectationDensityInterface(ExpectationInterface, Protocol): + """ + This interface doesn't take an EventDataInterface or Iterable[EventInterface] + because that would complicate caching. The stream of events is assumed + constant after selection. + """ + + # The event class that the instance handles + @property + def event_type(self) -> Type[EventInterface]: + """ + The event class that the implementation can handle + """ + + def expected_counts(self) -> float: + """ + Total expected counts + """ + + def event_probability(self) -> Iterable[float]: + """ + Return the probability of obtaining the observed set of measurement of each event, + given that the event was detected. It equals the expectation density times ncounts + + The units of the output the inverse of the phase space of the event_type data space. + e.g. if the event measured energy in keV, the units of output of this function are implicitly 1/keV + + This is provided as a helper function assuming the child classes implemented expectation_density + """ + + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if ( + cls.event_probability is ExpectationDensityInterface.event_probability + and + cls.expectation_density is ExpectationDensityInterface.expectation_density): + raise NotImplementedError("Implement event_probability and/or expectation_density") + + ncounts = self.expected_counts() + return [expectation/ncounts for expectation in self.expectation_density()] + + def expectation_density(self) -> Iterable[float]: + """ + Return the expected number of counts density from the start-th event + to the stop-th event. 
This equals the event probabiliy times the number of events + + This is provided as a helper function assuming the child classes implemented event_probability + + Parameters + ---------- + start + stop + + Returns + ------- + + """ + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if ( + cls.event_probability is ExpectationDensityInterface.event_probability + and + cls.expectation_density is ExpectationDensityInterface.expectation_density): + raise NotImplementedError("Implement event_probability and/or expectation_density") + + ncounts = self.expected_counts() + return [prob*ncounts for prob in self.event_probability()] + +class SumExpectationDensity(ExpectationDensityInterface): + """ + Convenience class to sum multiple ExpectationDensityInterface implementation + """ + + def __init__(self, *expectations:Tuple[ExpectationDensityInterface, None]): + # Allow None for convenience, we should remove it + self._expectations = tuple(ex for ex in expectations if ex is not None) + + self._event_type = expectations[0].event_type + + for ex in expectations: + if ex.event_type is not self._event_type: + raise TypeError("All expectations should have the same event type") + + @property + def event_type(self) -> Type[EventInterface]: + """ + The event class that the implementation can handle + """ + return self._event_type + + def expected_counts(self) -> float: + """ + Total expected counts + """ + return sum(ex.expected_counts() for ex in self._expectations) + + def expectation_density(self) -> Iterable[float]: + + for exdensity in zip(*[ex.expectation_density() for ex in self._expectations]): + yield sum(exdensity) + + + diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py new file mode 100644 index 00000000..4d1812b9 --- /dev/null +++ b/cosipy/interfaces/instrument_response_interface.py @@ -0,0 +1,162 @@ +import itertools +import operator +from typing import Protocol, Union, 
Optional, Iterable, Tuple, runtime_checkable, ClassVar + +from astropy.coordinates import SkyCoord +from astropy.time import Time +from astropy.units import Quantity +from histpy import Axes, Histogram + +from astropy import units as u +from scoords import Attitude + +from cosipy.interfaces import BinnedDataInterface, ExpectationDensityInterface, BinnedExpectationInterface, EventInterface +from cosipy.interfaces.photon_list import PhotonListWithDirectionInterface +from cosipy.interfaces.photon_parameters import PhotonInterface, PhotonWithDirectionInterface +from cosipy.polarization import PolarizationAngle + +__all__ = ["BinnedInstrumentResponseInterface"] + +class BinnedInstrumentResponseInterface(BinnedExpectationInterface, Protocol): + + def differential_effective_area(self, + data: BinnedDataInterface, + direction: SkyCoord, + energy:u.Quantity, + polarization:PolarizationAngle, + attitude:Attitude, + weight: Union[Quantity, float], + out: Quantity, + add_inplace: bool) -> Quantity: + """ + + Parameters + ---------- + data: + Binned data + direction: + Photon incoming direction. If not in a SpacecraftFrame, then provide an attitude for the transformation + energy: + Photon energy + polarization + Photon polarization angle. If the coordinate frame of the polarization convention is not a + SpacecraftFrame, then provide an attitude for the transformation + attitude + Attitude defining the orientation of the SC in an inertial coordinate system. + weight + Optional. Weighting the result by a given weight. Providing the weight at this point as opposed to + apply it to the output can result in greater efficiency. + out + Optional. Histogram to store the output. If possible, the implementation should try to avoid allocating + new memory. + add_inplace + Optional. If True and a Histogram output was provided, the implementation should try to avoid allocating new + memory and add --not set-- the result of this operation to the output. 
+ + Returns + ------- + The effective area times the event measurement probability distribution integrated on each of the bins + of the provided axes. It has the shape (direction.shape, energy.shape, polarization.shape, axes.shape) + """ + +@runtime_checkable +class InstrumentResponseFunctionInterface(Protocol): + + # The photon class and event class that the IRF implementation can handle + photon_type = ClassVar[PhotonInterface] + event_type = ClassVar[EventInterface] + + def event_probability(self, query: Iterable[Tuple[PhotonInterface, EventInterface]]) -> Iterable[float]: + """ + Return the probability density of measuring a given event given a photon. + + The units of the output the inverse of the phase space of the class event_type data space. + e.g. if the event measured energy in keV, the units of output of this function are implicitly 1/keV + """ + + def random_events(self, photons:Iterable[PhotonInterface]) -> Iterable[EventInterface]: + """ + Return a stream of random events, one per photon + """ + +@runtime_checkable +class FarFieldInstrumentResponseFunctionInterface(InstrumentResponseFunctionInterface, Protocol): + + def effective_area_cm2(self, photons: Iterable[PhotonWithDirectionInterface]) -> Iterable[float]: + """ + + """ + + def differential_effective_area_cm2(self, query: Iterable[Tuple[PhotonWithDirectionInterface, EventInterface]]) -> Iterable[float]: + """ + Event probability multiplied by effective area + + This is provided as a helper function assuming the child classes implemented event_probability + """ + + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if (cls.differential_effective_area_cm2 is FarFieldInstrumentResponseFunctionInterface.differential_effective_area_cm2 + and + cls.event_probability is FarFieldInstrumentResponseFunctionInterface.event_probability): + raise NotImplementedError("Implement differential_effective_area_cm2 and/or event_probability") + + query1, query2 = 
itertools.tee(query, 2) + photon_query = [photon for photon,_ in query1] + + return map(operator.mul, self.effective_area_cm2(photon_query), self.event_probability(query2)) + + def event_probability(self, query: Iterable[Tuple[PhotonWithDirectionInterface, EventInterface]]) -> Iterable[float]: + """ + Return the probability density of measuring a given event given a photon. + + In the far field case it is the same as the differential_effective_area_cm2 divided by the effective area + + This is provided as a helper function assuming the child classes implemented differential_effective_area_cm2 + """ + + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if ( + cls.differential_effective_area_cm2 is FarFieldInstrumentResponseFunctionInterface.differential_effective_area_cm2 + and + cls.event_probability is FarFieldInstrumentResponseFunctionInterface.event_probability): + raise NotImplementedError("Implement differential_effective_area_cm2 and/or event_probability") + + query1, query2 = itertools.tee(query, 2) + photon_query = [photon for photon, _ in query1] + + return map(operator.truediv, self.differential_effective_area_cm2(query2), self.effective_area_cm2(photon_query)) + + + def effective_area(self, photons: Iterable[PhotonWithDirectionInterface]) -> Iterable[u.Quantity]: + """ + Convenience function + """ + for area_cm2 in self.effective_area_cm2(photons): + yield u.Quantity(area_cm2, u.cm*u.cm) + + def differential_effective_area(self, query: Iterable[Tuple[PhotonWithDirectionInterface, EventInterface]]) -> Iterable[u.Quantity]: + for area_cm2 in self.differential_effective_area(query): + yield u.Quantity(area_cm2, u.cm*u.cm) + + + + + + + + + + + + + + + + + + + + + diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py new file mode 100644 index 00000000..4c5e4fcb --- /dev/null +++ b/cosipy/interfaces/likelihood_interface.py @@ -0,0 +1,27 @@ +from typing import Protocol, 
runtime_checkable + +__all__ = ['LikelihoodInterface', + 'BinnedLikelihoodInterface', + 'UnbinnedLikelihoodInterface'] + +from .expectation_interface import ExpectationDensityInterface, BinnedExpectationInterface, ExpectationInterface +from .data_interface import BinnedDataInterface, DataInterface, EventDataInterface +from .background_interface import BackgroundDensityInterface, BinnedBackgroundInterface, BackgroundInterface + +@runtime_checkable +class LikelihoodInterface(Protocol): + def get_log_like(self) -> float:... + @property + def nobservations(self) -> int: + """For BIC and other statistics""" + +@runtime_checkable +class BinnedLikelihoodInterface(LikelihoodInterface, Protocol): + """ + + """ + +@runtime_checkable +class UnbinnedLikelihoodInterface(LikelihoodInterface, Protocol): + """ + """ diff --git a/cosipy/interfaces/photon_list.py b/cosipy/interfaces/photon_list.py new file mode 100644 index 00000000..a6cb752d --- /dev/null +++ b/cosipy/interfaces/photon_list.py @@ -0,0 +1,82 @@ +import itertools +from typing import Protocol, ClassVar, Type, Iterator, runtime_checkable, Iterable + +from astropy.coordinates import BaseCoordinateFrame, SkyCoord +from scoords import SpacecraftFrame + +from .photon_parameters import PhotonInterface, PhotonWithEnergyInterface + +import astropy.units as u + +@runtime_checkable +class PhotonListInterface(Protocol): + + # Type returned by __iter__ + photon_type = ClassVar[Type] + + def __iter__(self) -> Iterator[PhotonInterface]: + """ + Return one Event at a time + """ + def __getitem__(self, item: int) -> PhotonInterface: + """ + Convenience method. Pretty slow in general. It's suggested that + the implementations override it + """ + return next(itertools.islice(self, item, None)) + + @property + def nphotons(self) -> int: + """ + Total number of events yielded by __iter__ + + Convenience method. Pretty slow in general. 
It's suggested that + the implementations override it + """ + return sum(1 for _ in iter(self)) + +@runtime_checkable +class EventDataWithEnergyInterface(PhotonListInterface, Protocol): + + def __iter__(self) -> Iterator[PhotonWithEnergyInterface]:... + + @property + def energy_radians(self) -> Iterable[float]:... + + @property + def energy(self) -> u.Quantity: + """ + Add fancy energy quantity + """ + return u.Quantity(self.energy_radians, u.radians) + +@runtime_checkable +class PhotonListWithDirectionInterface(PhotonListInterface, Protocol): + + @property + def frame(self) -> BaseCoordinateFrame:... + + @property + def direction_lon_radians(self) -> Iterable[float]: ... + + @property + def direction_lat_radians(self) -> Iterable[float]: ... + + @property + def direction_direction(self) -> SkyCoord: + """ + Add fancy energy quantity + """ + return SkyCoord(self.direction_lon_radians, + self.direction_lat_radians, + unit=u.rad, + frame=self.frame) + +@runtime_checkable +class PhotonListInSCFrameInterface(PhotonListInterface, Protocol): + + @property + def frame(self) -> SpacecraftFrame:... + + + diff --git a/cosipy/interfaces/photon_parameters.py b/cosipy/interfaces/photon_parameters.py new file mode 100644 index 00000000..ce875ba2 --- /dev/null +++ b/cosipy/interfaces/photon_parameters.py @@ -0,0 +1,104 @@ +from typing import Protocol, runtime_checkable + +from astropy import units as u +from astropy.coordinates import BaseCoordinateFrame, SkyCoord +from scoords import SpacecraftFrame + +from cosipy.polarization import PolarizationConvention, PolarizationAngle, StereographicConvention + + +@runtime_checkable +class PhotonInterface(Protocol): + """ + Derived classes have all access methods + """ + +@runtime_checkable +class PhotonWithEnergyInterface(PhotonInterface, Protocol): + + @property + def energy_keV(self) -> float:... 
+ + @property + def energy(self) -> u.Quantity: + """ + Add fancy energy quantity + """ + return u.Quantity(self.energy_keV, u.keV) + +@runtime_checkable +class PhotonWithDirectionInterface(PhotonInterface, Protocol): + + @property + def frame(self) -> BaseCoordinateFrame:... + + @property + def direction_lon_radians(self) -> float: ... + + @property + def direction_lat_radians(self) -> float: ... + + @property + def direction(self) -> SkyCoord: + """ + Add fancy energy quantity + """ + return SkyCoord(self.direction_lon_radians, + self.direction_lat_radians, + unit=u.rad, + frame=self.frame) + +# TODO: change to follow the same idea as EventInterface: +# Remove PhotonWithDirectionInterface +# Remove PhotonInSCFrameInterface +# Move direction_lon_radians and direction_lat_radians to PhotonWithDirectionInSCFrameInterface. +# and add a _sc suffix +# I'll need to coordinate first with those who are implemented the responses. + +@runtime_checkable +class PhotonInSCFrameInterface(PhotonInterface, Protocol): + + @property + def frame(self) -> SpacecraftFrame:... + +@runtime_checkable +class PhotonWithDirectionInSCFrameInterface(PhotonWithDirectionInterface, + PhotonInSCFrameInterface, Protocol): + pass + +@runtime_checkable +class PhotonWithDirectionAndEnergyInSCFrameInterface(PhotonWithDirectionInSCFrameInterface, + PhotonWithEnergyInterface, Protocol): + pass + +@runtime_checkable +class PolarizedPhotonInterface(Protocol): + + @property + def polarization_angle_rad(self) -> float: ... + + @property + def polarization_convention(self) -> PolarizationConvention:... 
+ + @property + def polarization_angle(self) -> PolarizationAngle: + """ + This convenience function only makes sense for implementations + that couple with PhotonWithDirectionInterface + """ + raise NotImplementedError("This class does not implement the polarization_angle() convenience method.") + +@runtime_checkable +class PolarizedPhotonStereographicConventionInSCInterface(PolarizedPhotonInterface, PhotonInSCFrameInterface, Protocol): + + @property + def polarization_convention(self) -> PolarizationConvention: + return StereographicConvention() + +@runtime_checkable +class PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface(PhotonWithDirectionAndEnergyInSCFrameInterface, PolarizedPhotonStereographicConventionInSCInterface, Protocol): + + @property + def polarization_angle(self) -> PolarizationAngle: + return PolarizationAngle(self._pa * u.rad, self.direction, 'stereographic') + diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py new file mode 100644 index 00000000..ba2e488b --- /dev/null +++ b/cosipy/interfaces/source_response_interface.py @@ -0,0 +1,73 @@ +from typing import Protocol, runtime_checkable +from astromodels import Model +from astromodels.sources import Source + +from .expectation_interface import BinnedExpectationInterface, ExpectationDensityInterface + +from cosipy.spacecraftfile import SpacecraftHistory + +__all__ = ["ThreeMLModelFoldingInterface", + "UnbinnedThreeMLModelFoldingInterface", + "BinnedThreeMLModelFoldingInterface", + "ThreeMLSourceResponseInterface", + "UnbinnedThreeMLSourceResponseInterface", + "BinnedThreeMLSourceResponseInterface"] + +@runtime_checkable +class ThreeMLModelFoldingInterface(Protocol): + def set_model(self, model: Model): + """ + The model is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. 
+ """ + +@runtime_checkable +class UnbinnedThreeMLModelFoldingInterface(ThreeMLModelFoldingInterface, ExpectationDensityInterface, Protocol): + """ + No new methods. Just the inherited ones. + """ + +@runtime_checkable +class BinnedThreeMLModelFoldingInterface(ThreeMLModelFoldingInterface, BinnedExpectationInterface, Protocol): + """ + No new methods. Just the inherited ones. + """ + +@runtime_checkable +class ThreeMLSourceResponseInterface(Protocol): + + def set_source(self, source: Source): + """ + The source is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. + """ + def copy(self) -> "ThreeMLSourceResponseInterface": + """ + This method is used to re-use the same object for multiple + sources. + It is expected to return a safe copy of itself + such that when + a new source is set, the expectation calculation + are independent. + + psr1 = ThreeMLSourceResponse() + psr2 = psr.copy() + psr1.set_source(source1) + psr2.set_source(source2) + """ + +@runtime_checkable +class UnbinnedThreeMLSourceResponseInterface(ThreeMLSourceResponseInterface, ExpectationDensityInterface, Protocol): + """ + No new methods. Just the inherited ones. + """ + +@runtime_checkable +class BinnedThreeMLSourceResponseInterface(ThreeMLSourceResponseInterface, BinnedExpectationInterface, Protocol): + """ + No new methods. Just the inherited ones. 
+ """ + + diff --git a/cosipy/interfaces/threeml_plugin_interface.py b/cosipy/interfaces/threeml_plugin_interface.py new file mode 100644 index 00000000..f1590592 --- /dev/null +++ b/cosipy/interfaces/threeml_plugin_interface.py @@ -0,0 +1,112 @@ +from typing import Dict, Optional + +from cosipy.interfaces import ThreeMLModelFoldingInterface, BackgroundInterface +from cosipy.interfaces.likelihood_interface import LikelihoodInterface +from threeML import PluginPrototype, Parameter + +__all__ = ["ThreeMLPluginInterface"] + +class ThreeMLPluginInterface(PluginPrototype): + + def __init__(self, + name: str, + likelihood: LikelihoodInterface, + response:ThreeMLModelFoldingInterface, + bkg:Optional[BackgroundInterface] = None,): + """ + + Parameters + ---------- + name + likefun: str or LikelihoodInterface (Use at your own risk. make sure it knows about the input data, response and bkg) + """ + + # PluginPrototype.__init__ does the following: + # Sets _name = name + # Sets _tag = None + # Set self._nuisance_parameters, which we do not use because + # we're overriding nuisance_parameters() and update_nuisance_parameters() + super().__init__(name, {}) + + self._like = likelihood + self._response = response + self._bkg = bkg + + # Currently, the only nuisance parameters are the ones for the bkg + # We could have systematics here as well + if self._bkg is None: + self._threeml_bkg_parameters = {} + else: + # 1. Adds plugin name, required by 3ML code + # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 + # 2. Translation to bkg bare parameters. 3ML "Parameter" keeps track of a few more things than a "bare" (Quantity) parameter. 
+ self._threeml_bkg_parameters = {self._add_prefix_name(label): Parameter(label, param.value, unit=param.unit) for label, param in self._bkg.parameters.items()} + + # Allows idiom plugin.bkg_parameters["bkg_param_name"] to get 3ML parameter + self.bkg_parameter = ThreeMLPluginInterface._Bkg_parameter(self) + + def _add_prefix_name(self, label): + return self._name + "_" + label + + def _remove_prefix_name(self, label): + return label[len(self._name) + 1:] + + @property + def nuisance_parameters(self) -> Dict[str, Parameter]: + # Currently, the only nuisance parameters are the ones for the bkg + # We could have systematics here as well + return self._threeml_bkg_parameters + + def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Parameter]): + # Currently, the only nuisance parameters are the ones for the bkg + # We could have systematics here as well + self._threeml_bkg_parameters = new_nuisance_parameters + + # Set underlying bkg model + self._update_bkg_parameters() + + def _update_bkg_parameters(self, name = None): + # 1. Remove plugin name. Opposite of the nuisance_parameters property + # 2. 
Convert to "bare" Quantity value + if self._bkg is not None: + if name is None: + #Update all + self._bkg.set_parameters(**{self._remove_prefix_name(label): parameter.as_quantity for label, parameter in + self._threeml_bkg_parameters.items()}) + else: + # Only specific value + self._bkg.set_parameters(**{name:self._threeml_bkg_parameters[self._add_prefix_name(name)].as_quantity}) + + class _Bkg_parameter: + # Allows idiom plugin.bkg_parameters["bkg_param_name"] to get 3ML parameter + def __init__(self, plugin): + self._plugin = plugin + def __getitem__(self, name): + # Adds plugin name, required by 3ML code + return self._plugin._threeml_bkg_parameters[self._plugin._add_prefix_name(name)] + def __setitem__(self, name, param: Parameter): + if param.name != self[name].name: + raise ValueError(f"Name of new set parameter need to match existing parameters ({param.name} != {self[name].name})") + self._plugin._threeml_bkg_parameters[self._plugin._add_prefix_name(name)] = param + self._plugin._update_bkg_parameters(name) + + + def get_number_of_data_points(self) -> int: + return self._like.nobservations + + def set_model(self, model): + self._response.set_model(model) + + def get_log_like(self): + # Update underlying background object in case the Parameter objects changed internally + self._update_bkg_parameters() + + return self._like.get_log_like() + + def inner_fit(self): + """ + Required for 3ML fit. 
+ + Maybe in the future use fast norm fit to minimize the background normalization + """ + return self.get_log_like() diff --git a/cosipy/polarization/__init__.py b/cosipy/polarization/__init__.py index 08187a3b..5b8a163a 100644 --- a/cosipy/polarization/__init__.py +++ b/cosipy/polarization/__init__.py @@ -1,3 +1,3 @@ -from .polarization_asad import PolarizationASAD from .conventions import PolarizationConvention, OrthographicConvention, StereographicConvention, IAUPolarizationConvention from .polarization_angle import PolarizationAngle +from .polarization_axis import PolarizationAxis diff --git a/cosipy/polarization/conventions.py b/cosipy/polarization/conventions.py index a44eea01..80787097 100644 --- a/cosipy/polarization/conventions.py +++ b/cosipy/polarization/conventions.py @@ -1,5 +1,7 @@ +from typing import Union + import numpy as np -from astropy.coordinates import SkyCoord, Angle +from astropy.coordinates import SkyCoord, Angle, BaseCoordinateFrame, frame_transform_graph, ICRS import astropy.units as u import inspect from scoords import Attitude, SpacecraftFrame @@ -46,14 +48,50 @@ def get_convention(cls, name, *args, **kwargs): except KeyError as e: raise Exception(f"No polarization convention by name '{name}'") from e + def get_convention_registered_name(cls, convention_class): + """ + Opposite of get_convention. Returns None if not found. 
+ """ + + if isinstance(convention_class, PolarizationConvention): + # If the user passed the instant instead of the class + convention_class = type(convention_class) + + for conv_name, conv_class in cls._registered_conventions.items(): + if conv_class is convention_class: + return conv_name + + # If not found + return None + @property def frame(self): """ Astropy coordinate frame """ return None - - def get_basis(self, source_direction: SkyCoord): + + def get_basis_local(self, source_vector: np.ndarray): + """ + Get the px,py unit vectors that define the polarization plane on + this convention, and in the convention's frame. + + Polarization angle increments from px to py. + + Parameters + ---------- + source_vector: np.ndarray + Unit cartesian vector. Shape (3,N) + + Returns + ------- + px,py : np.ndarray + Polarization angle increases from px to py. pz is always + the opposite of the source direction --i.e. in the direction of the + particle. + """ + + def get_basis(self, source_direction: SkyCoord, *args, **kwargs): """ Get the px,py unit vectors that define the polarization plane on this convention. Polarization angle increments from px to py. @@ -71,12 +109,26 @@ def get_basis(self, source_direction: SkyCoord): particle. 
""" + # To the convention's frame + source_vector = source_direction.transform_to(self.frame).cartesian.xyz + + # Bare basis + px,py = self.get_basis_local(source_vector) + + # To SkyCoord in the source frame + px = SkyCoord(*px, representation_type='cartesian', frame=self.frame).transform_to(source_direction.frame) + py = SkyCoord(*py, representation_type='cartesian', frame=self.frame).transform_to(source_direction.frame) + + return px, py + + # Orthographic projection convention class OrthographicConvention(PolarizationConvention): def __init__(self, - ref_vector: SkyCoord = None, + ref_vector: Union[SkyCoord, np.ndarray[float]] = None, + frame:Union[BaseCoordinateFrame, None] = None, clockwise: bool = False): """ The local polarization x-axis points towards an arbitrary reference vector, @@ -85,21 +137,39 @@ def __init__(self, Parameters ---------- - ref_vector : SkyCoord + ref_vector : Union[SkyCoord, np.ndarray[float]] Set the reference vector, defaulting to celestial north if not provided - (IAU convention) + (IAU convention). Alternatively, pass the cartesian representation and set a frame. + frame : BaseCoordinateFrame + Only used if ref_vector is a bare cartesian vector. Default: ICRS clockwise : bool Direction of increasing PA, when looking at the source. Default is false --i.e. counter-clockwise when looking outwards. 
""" + + if frame is None: + frame = ICRS + if ref_vector is None: - self.ref_vector = SkyCoord(ra=0 * u.deg, dec=90 * u.deg, frame="icrs") + self._ref_vector = np.asarray([0,0,1]) + self._frame = frame else: - self.ref_vector = ref_vector + if isinstance(ref_vector, SkyCoord): + self._ref_vector = ref_vector.cartesian.xyz + self._frame = ref_vector.frame + else: + self._ref_vector = ref_vector + self._frame = frame + + if not isinstance(self._frame, BaseCoordinateFrame): + self._frame = frame_transform_graph.lookup_name(self._frame) self._sign = 1 if clockwise else -1 + def ref_vector(self): + return SkyCoord(self._ref_vector, representation_type = 'cartesian', frame = self.frame) + def __repr__(self): return f"" @@ -112,15 +182,14 @@ def is_clockwise(self): @property def frame(self): - return self.ref_vector.frame + return self._frame - def get_basis(self, source_direction: SkyCoord): + def get_basis_local(self, source_vector: np.ndarray): # Extract Cartesian coordinates for the source direction. - pz = self._sign * source_direction.transform_to(self.frame).cartesian.xyz + pz = self._sign * source_vector[2] # Broadcast reference vector - ref = np.expand_dims(self.ref_vector.cartesian.xyz, - axis = tuple(np.arange(1,pz.ndim, dtype = int))) + ref = np.expand_dims(self._ref_vector, axis = tuple(np.arange(1,pz.ndim, dtype = int))) # Get py. 
Normalize because pz and ref dot not make 90deg angle py = np.cross(pz, ref, axisa = 0, axisb = 0, axisc = 0) @@ -128,16 +197,39 @@ def get_basis(self, source_direction: SkyCoord): # Get px px = np.cross(py, pz, axisa = 0, axisb = 0, axisc = 0) - - # To SkyCoord - px = SkyCoord(*px, representation_type='cartesian', frame = self.frame) - py = SkyCoord(*py, representation_type='cartesian', frame = self.frame) - + return px, py +class ConventionInSpacecraftFrameMixin: + """ + Checks for a frame with attitude + + Sub-classes need _frame property, and be sub-classes of PolarizationConvention + """ + + def get_basis(self, source_direction: SkyCoord, attitude=None): + """ + + Parameters + ---------- + source_direction + attitude: This overrides the object frame! + + Returns + ------- + + """ + if self._frame is None and attitude is None: + raise RuntimeError("You need to pass an attitude to convert between local and inertial coordinates") + + if attitude is not None: + self._frame = SpacecraftFrame(attitude=attitude) -#https://github.com/zoglauer/megalib/blob/1eaad14c51ec52ad1cb2399a7357fe2ca1074f79/src/cosima/src/MCSource.cc#L3452 -class MEGAlibRelative(OrthographicConvention): + return super().get_basis(source_direction) + + + #https://github.com/zoglauer/megalib/blob/1eaad14c51ec52ad1cb2399a7357fe2ca1074f79/src/cosima/src/MCSource.cc#L3452 +class MEGAlibRelative(OrthographicConvention, ConventionInSpacecraftFrameMixin): def __init__(self, axis, attitude = None): """ @@ -155,20 +247,19 @@ def __init__(self, axis, attitude = None): axis = axis.lower() if axis == 'x': - ref_vector = SkyCoord(lon=0 * u.deg, lat=0 * u.deg, - frame = SpacecraftFrame(attitude = attitude)) + ref_vector = np.asarray([1,0,0]) elif axis == 'y': - ref_vector = SkyCoord(lon=90 * u.deg, lat=0 * u.deg, - frame = SpacecraftFrame(attitude = attitude)) + ref_vector = np.asarray([0,1,0]) elif axis == 'z': - ref_vector = SkyCoord(lon=0 * u.deg, lat=90 * u.deg, - frame = SpacecraftFrame(attitude = 
attitude)) + ref_vector = np.asarray([0,0,1]) else: raise ValueError("Axis must be 'x', 'y' or 'z'.") - - super().__init__(ref_vector, clockwise = False) + + frame = SpacecraftFrame(attitude = attitude) + + super().__init__(ref_vector, frame = frame, clockwise = False) - def get_basis(self, source_direction: SkyCoord): + def get_basis_local(self, source_vector: np.ndarray): # The MEGAlib and orthographic definitions are prett much the same, but # they differ on the order of the cross products @@ -190,12 +281,10 @@ def get_basis(self, source_direction: SkyCoord): # MEGAlib's PA is counter-clockwise when looking at the sourse # Flip px <-> py - py,px = super().get_basis(source_direction) + py,px = super().get_basis_local(source_vector) # Sign of px - py = SkyCoord(-py.cartesian, - representation_type = 'cartesian', - frame = py.frame) + py = -py return px,py @@ -231,13 +320,14 @@ def __init__(self): angle of electric-vector maximum, e, starting from North and increasing through East. """ - super().__init__(ref_vector = SkyCoord(ra=0 * u.deg, dec=90 * u.deg, - frame="icrs"), + super().__init__(ref_vector = [0,0,1], + frame="icrs", clockwise = False) # Stereographic projection convention -class StereographicConvention(PolarizationConvention): +@PolarizationConvention.register("stereographic") +class StereographicConvention(PolarizationConvention, ConventionInSpacecraftFrameMixin): def __init__(self, clockwise: bool = False, @@ -262,20 +352,34 @@ def __init__(self, Spacecraft orientation """ - self._attitude = attitude + self._frame = SpacecraftFrame(attitude=attitude) self._sign = 1 if clockwise else -1 @property def frame(self): - return SpacecraftFrame(attitude = self._attitude) - - def get_basis(self, source_direction: SkyCoord): - # Extract Cartesian coordinates for the source direction - x, y, z = source_direction.cartesian.xyz + return self._frame + + def get_basis_local(self, source_vector:Union[np.ndarray[float], SkyCoord]): + """ + source_vector already in SC 
coordinates as a vector + + Parameters + ---------- + source_vector: (3,N) + + Returns + ------- + px,py: Basis vector. (2,N). Also in SC coordinates + """ + + if isinstance(source_vector, SkyCoord): + source_vector = source_vector.cartesian.xyz + + x,y,z = source_vector # Calculate the projection of the reference vector in stereographic coordinates - px_x = 1 - (x**2 - y**2) / (z + 1) ** 2 + px_x = 1 - (x ** 2 - y ** 2) / (z + 1) ** 2 px_y = -2 * x * y / (z + 1) ** 2 px_z = -2 * x / (z + 1) @@ -287,10 +391,7 @@ def get_basis(self, source_direction: SkyCoord): px /= norm # Calculate the perpendicular vector py using the cross product - py = self._sign*np.cross([x, y, z], px, axis=0) + py = self._sign * np.cross([x, y, z], px, axis=0) + + return px,py - # To SkyCoord - px = SkyCoord(*px, representation_type='cartesian', frame = self.frame) - py = SkyCoord(*py, representation_type='cartesian', frame = self.frame) - - return px, py diff --git a/cosipy/polarization/polarization_angle.py b/cosipy/polarization/polarization_angle.py index 0ec33b24..ed7fd418 100644 --- a/cosipy/polarization/polarization_angle.py +++ b/cosipy/polarization/polarization_angle.py @@ -7,7 +7,8 @@ class PolarizationAngle: - def __init__(self, angle, source, + def __init__(self, angle, + source: SkyCoord = None, convention = 'iau', *args, **kwargs): """ @@ -18,7 +19,7 @@ def __init__(self, angle, source, angle : :py:class:`astropy.coordinates.Angle Polarization angle source : :py:class:`astropy.coordinates.SkyCoord` - Source direction + Source direction. 
Optional, but needed to use vector() and transform_to() convention : PolarizationConvention Convention the defined the polarization basis and direction in the polarization plane (for which the source direction is normal) @@ -54,12 +55,19 @@ def convention(self): def source(self): return self._source + @source.setter + def source(self, coord: SkyCoord): + self._source = coord + @property def vector(self): """ Direction of the electric field vector """ + if self.source is None: + raise RuntimeError("Set source first") + # Get the projection vectors for the source direction in the # current convention px, py = self._convention.get_basis(self._source) @@ -86,6 +94,9 @@ def vector(self): def transform_to(self, convention, *args, **kwargs): + if self.source is None: + raise RuntimeError("Set source first") + # Standarize convention convention = PolarizationConvention.get_convention(convention, *args, **kwargs) diff --git a/cosipy/polarization/polarization_axis.py b/cosipy/polarization/polarization_axis.py new file mode 100644 index 00000000..5a59e98d --- /dev/null +++ b/cosipy/polarization/polarization_axis.py @@ -0,0 +1,162 @@ +import numpy as np +from .conventions import PolarizationConvention +from astropy import units as u + +from .polarization_angle import PolarizationAngle + +from histpy import Axis + + +class PolarizationAxis(Axis): + """ + Defines a polarization axis compatible with PolarizationAngle. + + Parameters: + edges (array-like): + Bin edges. Can be a Quantity array or PolarizationAngle + convention : PolarizationConvention + Convention defining the polarization basis in + the polarization plane (for which the source direction is normal). + Overrides the convention of "edges", if a PolarizationAngle object + was provided + label (str): Label for axis. 
If edges is an Axis object, this will + override its label + unit (unit-like): Unit for axis (will override unit of edges) + copy (bool): True if edge array should be distinct from passed-in + edges; if False, will use same edge array if possible + *args, **kwargs + Passed to convention class. + """ + + def __init__(self, + edges, + convention = 'iau', + label = None, + unit = None, + copy=True): + + if isinstance(edges, PolarizationAngle): + convention = edges.convention if convention is None else convention + edges = edges.angle + + super().__init__(edges, label = label, scale='linear', unit=unit, copy=copy) + + if self.unit is None: + raise ValueError("PolarizationAxis needs edges with units") + + self._convention = PolarizationConvention.get_convention(convention) + + @property + def convention(self): + return self._convention + + def _copy(self, edges=None, copy_edges=True): + """Make a deep copy of a HealpixAxis, optionally + replacing edge array. (The superclass's _copy + method handles edge replacement.) + """ + + new = super()._copy(edges, copy_edges) + + # self._convention is not copied. It's safe to share it. 
+ + return new + + def _standardize_value(self, value): + if isinstance(value, PolarizationAngle): + # Transform to axis' convention + return value.transform_to(self.convention).angle + else: + return value + + def find_bin(self, value, right = False): + return super().find_bin(self._standardize_value(value), right = right) + + def interp_weights(self, values): + return super().interp_weights(self._standardize_value(values)) + + def interp_weights_edges(self, values): + return super().interp_weights_edges(self._standardize_value(values)) + + @property + def lower_bounds(self): + return PolarizationAngle(super().lower_bounds, convention=self.convention) + + @property + def upper_bounds(self): + return PolarizationAngle(super().upper_bounds, convention=self.convention) + + @property + def bounds(self): + return PolarizationAngle(super().bounds, convention=self.convention) + + @property + def lo_lim(self): + return PolarizationAngle(super().lo_lim, convention=self.convention) + + @property + def hi_lim(self): + return PolarizationAngle(super().hi_lim, convention=self.convention) + + @property + def edges(self): + return PolarizationAngle(super().edges, convention=self.convention) + + @property + def centers(self): + return PolarizationAngle(super().centers, convention=self.convention) + + def _write_metadata(self, axis_set): + """ + Save extra metadata to existing dataset + """ + + super()._write_metadata(axis_set) + + convention = PolarizationConvention.get_convention_registered_name(self._convention) + + if convention is None: + raise RuntimeError(f"Only PolarizationAxis object with a registered named convention " + "can be saved disk") + + axis_set.attrs['convention'] = convention + + @classmethod + def _open(cls, dataset): + """ + Create Axis from HDF5 dataset + Written as a virtual constructor so that + subclasses may override + """ + + + edges = np.asarray(dataset) + + metadata = cls._open_metadata(dataset) + + new = cls.__new__(cls) + 
PolarizationAxis.__init__(new, + edges = edges, + unit = metadata['unit'], + convention = metadata['convention'], + label = metadata['label'], + copy = False) + + return new + + @classmethod + def _open_metadata(cls, dataset): + """ + Returns unit, label and scale as a dictionary + """ + + metadata = super()._open_metadata(dataset) + + metadata['convention'] = dataset['convention'] + + return metadata + + + + + diff --git a/cosipy/polarization_fitting/__init__.py b/cosipy/polarization_fitting/__init__.py new file mode 100644 index 00000000..eba26341 --- /dev/null +++ b/cosipy/polarization_fitting/__init__.py @@ -0,0 +1 @@ +from .polarization_asad import PolarizationASAD \ No newline at end of file diff --git a/cosipy/polarization/polarization_asad.py b/cosipy/polarization_fitting/polarization_asad.py similarity index 99% rename from cosipy/polarization/polarization_asad.py rename to cosipy/polarization_fitting/polarization_asad.py index ff2154ff..56d281ca 100644 --- a/cosipy/polarization/polarization_asad.py +++ b/cosipy/polarization_fitting/polarization_asad.py @@ -44,7 +44,7 @@ class PolarizationASAD(): background : dict or Histogram, or list of same Unbinned or binned background model, or list of backgrounds if separated in time - sc_orientation : cosipy.spacecraftfile.SpacecraftFile.SpacecraftFile + sc_orientation : cosipy.spacecraftfile.SpacecraftHistory.SpacecraftHistory Spacecraft orientation response_file : str or pathlib.Path Path to detector response diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index 1463acc9..325882db 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -178,6 +178,10 @@ def axes(self): """ return self._axes + @property + def measurement_axes(self): + return self.axes['Em', 'Phi', 'PsiChi'] + @property def dtype(self): """ diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index d139a5fb..1dee8a0c 
100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -1,12 +1,24 @@ from histpy import Histogram +from astropy.coordinates import SkyCoord +from astropy.units import Quantity + +from cosipy.polarization.polarization_axis import PolarizationAxis +from cosipy.threeml.util import to_linear_polarization +from mhealpy import HealpixMap +from cosipy.interfaces import BinnedInstrumentResponseInterface, BinnedDataInterface +from histpy import Histogram, Axis, Axes # , Axes, Axis import numpy as np import astropy.units as u -from scoords import SpacecraftFrame, Attitude +from scoords import Attitude from .functions import get_integrated_spectral_model import logging + +from cosipy.spacecraftfile import SpacecraftAttitudeMap +from ..data_io import EmCDSBinnedData + logger = logging.getLogger(__name__) class PointSourceResponse(Histogram): @@ -43,6 +55,10 @@ def photon_energy_axis(self): return self.axes['Ei'] + @property + def measurement_axes(self): + return self.axes['Em', 'Phi', 'PsiChi'] + def get_expectation(self, spectrum, polarization=None, flux=None): """ Convolve the response with a spectral (and optionally, polarization) hypothesis to obtain the expected @@ -63,6 +79,8 @@ def get_expectation(self, spectrum, polarization=None, flux=None): Histogram with the expected counts on each analysis bin """ + polarization = to_linear_polarization(polarization) + if polarization is None: if 'Pol' in self.axes.labels: @@ -70,7 +88,6 @@ def get_expectation(self, spectrum, polarization=None, flux=None): raise RuntimeError("Must include polarization in point source response if using polarization response") contents = self.contents - axes = self.axes[1:] else: @@ -95,7 +112,6 @@ def get_expectation(self, spectrum, polarization=None, flux=None): contents = np.tensordot(weights, self.contents, axes=(0, self.axes.label_to_index('Pol'))) - axes = self.axes['Em', 'Phi', 'PsiChi'] if flux is None: energy_axis = self.photon_energy_axis @@ 
-105,7 +121,7 @@ def get_expectation(self, spectrum, polarization=None, flux=None): # if self is sparse, expectation will be a SparseArray with # no units, so set the result's unit explicitly - hist = Histogram(axes, contents = expectation, + hist = Histogram(self.measurement_axes, contents = expectation, unit = self.unit * flux.unit, copy_contents = False) @@ -113,3 +129,83 @@ def get_expectation(self, spectrum, polarization=None, flux=None): raise RuntimeError("Expectation should be dimensionless, but has units of " + str(hist.unit) + ".") return hist + + @classmethod + def from_dwell_time_map(cls, + data:BinnedDataInterface, + response: BinnedInstrumentResponseInterface, + exposure_map: HealpixMap, + energy_axis: Axis, + polarization_axis: PolarizationAxis = None + ): + + axes = [energy_axis] + + polarization_centers = None + if polarization_axis is not None: + axes += [polarization_axis] + polarization_centers = polarization_axis.centers + + axes += list(data.axes) + + psr = PointSourceResponse(axes, unit=u.cm * u.cm * u.s) + + for p in range(exposure_map.npix): + + coord = exposure_map.pix2skycoord(p) + + if exposure_map[p] != 0: + psr += response.differential_effective_area(data, coord, energy_axis.centers, polarization_centers) * exposure_map[p] + + return psr + + @classmethod + def from_scatt_map(cls, + coord: SkyCoord, + data:BinnedDataInterface, + response: BinnedInstrumentResponseInterface, + scatt_map: SpacecraftAttitudeMap, + energy_axis: Axis, + polarization_axis: PolarizationAxis = None + ): + """ + + Parameters + ---------- + measured_axes + response + scatt_map + energy_axis + polarization_axis + + Returns + ------- + + """ + + if not isinstance(data, EmCDSBinnedData): + raise TypeError(f"Wrong data type '{type(data)}', expected {EmCDSBinnedData}.") + + axes = [energy_axis] + + if polarization_axis is not None: + axes += [polarization_axis] + + axes += list(data.axes) + axes = Axes(axes) + + psr = Quantity(np.empty(shape=axes.shape), unit = u.cm * 
u.cm * u.s) + + for att, exposure in zip(scatt_map.attitudes, scatt_map.weights): + + response.differential_effective_area(data, + coord, + energy_axis.centers, + None if polarization_axis is None else polarization_axis.centers, + attitude = att, + weight=exposure, + out=psr, + add_inplace=True) + + return PointSourceResponse(axes, contents = psr) + diff --git a/cosipy/response/__init__.py b/cosipy/response/__init__.py index 28003419..fb397fae 100644 --- a/cosipy/response/__init__.py +++ b/cosipy/response/__init__.py @@ -4,3 +4,7 @@ from .GalacticResponse import GalacticResponse from .ExtendedSourceResponse import ExtendedSourceResponse from .RspConverter import RspConverter +from .threeml_response import * +from .threeml_point_source_response import * +from .instrument_response import * +from .rsp_to_arf_rmf import RspArfRmfConverter \ No newline at end of file diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py new file mode 100644 index 00000000..3ec3b08e --- /dev/null +++ b/cosipy/response/ideal_response.py @@ -0,0 +1,1074 @@ +import itertools +import warnings +from collections.abc import Callable +from typing import Iterable, Tuple, Union, Iterator + +from astropy.coordinates import Angle, SkyCoord +from astropy.units import Quantity +from cosipy.data_io.EmCDSUnbinnedData import EmCDSEventInSCFrame +from cosipy.interfaces import ExpectationDensityInterface +from cosipy.interfaces.data_interface import EmCDSEventDataInSCFrameInterface +from cosipy.polarization import StereographicConvention, PolarizationConvention, PolarizationAngle +from cosipy.response.relative_coordinates import RelativeCDSCoordinates +from more_itertools.more import sample +from numpy._typing import NDArray +from scipy.optimize import minimize_scalar +from scipy.stats import rv_continuous, truncnorm, norm, uniform, randint, poisson +from scipy.stats.sampling import SimpleRatioUniforms +import astropy.units as u +import numpy as np + +from 
cosipy.interfaces.event import EmCDSEventInSCFrameInterface +from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface +from cosipy.interfaces.photon_parameters import PhotonInterface, PhotonWithDirectionAndEnergyInSCFrameInterface, \ + PhotonWithEnergyInterface, PhotonWithDirectionInSCFrameInterface +from cosipy.response.photon_types import \ + PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface as PolDirESCPhoton, \ + PhotonWithDirectionAndEnergyInSCFrame, PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention +from scipy.special import erfi, erf + +from cosipy.util.iterables import itertools_batched +from scoords import SpacecraftFrame + + +def _to_rad(angle): + if isinstance(angle, (Quantity, Angle)): + return angle.to_value(u.rad) + else: + return angle + + +class _SimpleRVSMixin: + """ + Helper mixin for custom distributions (rv_continuous subclasses) + using SimpleRatioUniforms + + Subclasses need to define _pdf + + """ + + @property + def _mode(self): + # Return analytic mode if you can. + # Otherwise it will be estimated numerically + return None + + def _simple_ratio_uniforms_rvs(self, *args, size=None, random_state=None): + if warnings.catch_warnings(): + # Suppress warning + # "WARNING RuntimeWarning: [objid: SROU] 22 : mode: try finding it (numerically) => (distribution) incomplete distribution object, entry missing" + # when the mode need to be computed analytically + + if self._mode is None: + warnings.filterwarnings( + "ignore", + message=r".*\[objid: SROU\].*", + category=RuntimeWarning, + ) + + rng = SimpleRatioUniforms(self, random_state=random_state, mode=self._mode) + + if size == (): + # SimpleRatioUniforms.rvs expects an integer, tuple of integers or None. + # It crashes with an empty tuple, which corresponds to a scalar. 
+ size = None + + return rng.rvs(size=size) + + def _rvs(self, *args, **kwargs): + return self._simple_ratio_uniforms_rvs(*args, **kwargs) + +class _RVSMixin(_SimpleRVSMixin): + """ + Helper mixin for custom distributions (rv_continuous subclasses) + that will likely only get a sample per setup + + Subclasses need to define _pdf and _cdf + """ + + def _rvs(self, *args, size=None, **kwargs): + + # Faster than default _rvs for large sizes, but slow setup + # Most of the time we'll need a new setup per energy + + if size is None or size == tuple(): + return super()._rvs(*args, size=size, **kwargs) + else: + return self._simple_ratio_uniforms_rvs(*args, size = size, **kwargs) + +class KleinNishinaPolarScatteringAngleDist(_RVSMixin, rv_continuous): + """ + Klein-Nishina scattering angle distribution + """ + + def __init__(self, energy, *args, **kwargs): + + super().__init__(0, *args, a=0, b=np.pi, **kwargs) + + self._eps = energy.to_value(u.keV) / 510.99895069 # E/m_ec^2 + + # Normalization + # Mathematica + # Integrate[( + # Sin[\[Theta]] (1 + 1/( + # 1 + \[Epsilon] (1 - Cos[\[Theta]])) + \[Epsilon] (1 - + # Cos[\[Theta]]) - + # Sin[\[Theta]]^2))/(1 + \[Epsilon] (1 - + # Cos[\[Theta]]))^2, {\[Theta], 0, \[Pi]}, + # Assumptions -> {\[Epsilon] > 0}] + + A = 2 * self._eps * (2 + self._eps * (1 + self._eps) * (8 + self._eps)) / (1 + 2 * self._eps) ** 2 + B = (-2 + self._eps * (self._eps - 2)) * np.log(1 + 2 * self._eps) + + self._norm = (A + B) / self._eps ** 3 + + def _pdf(self, phi, *args): + + # Substitute Compton kinematic equation in Klein-Nishina dsigma/dOmega + # Mathematica + # eratio = 1 + self._eps (1 - cos_phi]) (*e/ep*) + # (1/eratio)^2 (1/eratio + eratio - Sin[\[Theta]]^2) Sin[\[Theta]] + + sin_phi = np.sin(phi) + cos_phi = np.cos(phi) + + A = 1 + (1 / (1 + self._eps * (1 - cos_phi))) + self._eps * (1 - cos_phi) - sin_phi ** 2 + B = (1 + self._eps * (1 - cos_phi)) ** 2 + + # Extra sin(phi) to account for phasespace + return sin_phi * A / B / self._norm + + def 
_cdf(self, phi, *args): + + # Mathematica + # Integrate[( + # Sin[\[Theta]] (1 + 1/( + # 1 + self._eps (1 - cos_phi])) + self._eps (1 - + # cos_phi]) - + # Sin[\[Theta]]^2))/ ((1 + self._eps (1 - + # cos_phi]))^2), {\[Theta], 0, \[Theta]p}, + # Assumptions -> {self._eps > 0, \[Theta]p < \[Pi], \[Theta]p > 0}] + + sin_phi = np.sin(phi) + cos_phi = np.cos(phi) + + eps = self._eps + eps2 = eps * eps + eps3 = eps2 * eps + + A = 1 + eps - eps * cos_phi + logA = np.log(A) + B = (eps * (4 + 10 * eps + 8 * eps2 + eps3) \ + - 2 * eps3 * cos_phi ** 3 \ + + 2 * (1 + eps) ** 2 * (-2 - 2 * eps + eps2) * logA \ + + eps2 * cos_phi ** 2 * (6 + 10 * eps + eps2 + 2 * (-2 - 2 * eps + eps2) * logA) \ + - 2 * eps * cos_phi * (2 + 8 * eps + 8 * eps2 + eps3 \ + + 2 * (-2 - 4 * eps - eps2 + eps3) * logA)) + C = 2 * eps3 * A * A + + return B / C / self._norm + +class KleinNishinaAzimuthalScatteringAngleDist(_RVSMixin, rv_continuous): + + def __init__(self, energy, theta, *args, **kwargs): + """ + Conditional probability, given a polar angle and energy. + + NOTE: input phi in pdf(phi) and cdf(phi) MUST lie between [0,2*pi]. The results are unpredictable otherwise. 
+ + Parameters + ---------- + energy + theta: polar angle + args + kwargs + """ + + super().__init__(0, *args, a=0, b=2*np.pi, **kwargs) + + theta = _to_rad(theta) + + # precompute some stuff + self._eps = energy.to_value(u.keV) / 510.99895069 # E/m_ec^2 + self._sin_theta2 = np.sin(theta) ** 2 + self._energy_ratio = 1 + self._eps * (1 - np.cos(theta)) # From kinematics + self._energy_ratio2 = self._energy_ratio * self._energy_ratio + self._energy_ratio_inv = 1/self._energy_ratio + self._energy_ratio_inv2 = self._energy_ratio_inv * self._energy_ratio_inv + self._energy_ratio_inv3 = self._energy_ratio_inv2 * self._energy_ratio_inv + + # Mathematica + # Integrate[(1/eratio + eratio - 2 sintheta2 Cos[\[Phi]]^2)/ + # eratio^2, {\[Phi], 0, 2 \[Pi]}, + # Assumptions -> {\[Epsilon] > 0}] // FullSimplify + self._norm = 2 * np.pi * (1 + self._energy_ratio2 - self._sin_theta2 * self._energy_ratio) * self._energy_ratio_inv3 + + def _pdf(self, phi, *args): + """ + + Parameters + ---------- + phi: azimuthal angle, starting from the electric field vector direction + args + + Returns + ------- + + """ + + phi = _to_rad(phi) + + cos_phi = np.cos(phi) + + return (self._energy_ratio + self._energy_ratio_inv - 2 * self._sin_theta2 * cos_phi * cos_phi) * self._energy_ratio_inv2 / self._norm + + def _cdf(self, phi, *args): + + phi = _to_rad(phi) + + # Mathematica + # Integrate[(1/eratio + eratio - 2 sintheta2 Cos[\[Phi]]^2)/ + # eratio^2, {\[Phi], 0, \[Phi]lim}, + # Assumptions -> {\[Epsilon] > 0}] // FullSimplify + + A = phi + phi*self._energy_ratio2 - self._energy_ratio * self._sin_theta2 * phi - self._energy_ratio * self._sin_theta2 * np.cos(phi) * np.sin(phi) + + return A * self._energy_ratio_inv3 / self._norm + +class ARMNormDist(_SimpleRVSMixin, rv_continuous): + + def __init__(self, phi, angres, *args, **kwargs): + """ + This accounts for the truncating effect since ARM is limited to [-phi, pi-phi]. 
+ It also accounts for the sin(phi+arm) phasespace + + Parameters + ---------- + phi: Polar scattering angle + angres: Standard deviation of the equivalent gaussian + args + kwargs + """ + + phi = _to_rad(phi) + angres = _to_rad(angres) + + super().__init__(0, *args, a=-phi, b= np.pi - phi, **kwargs) + + # normalized such that int_0^pi random_arm = 1 (already includes sin(phi+arm)) + # Integrate[PDF[TruncatedDistribution[{0,\[Pi]},NormalDistribution [\[Phi],\[Sigma]]], x]Sin[x],{x,0,\[Pi]}]//Re//FullSimplify + # Mathematica couldn't get only the real part analytically + + self._phi = phi + self._angres = angres + + self._norm = np.real( + np.exp(-(angres ** 2 / 2) - 1j * phi) * + (1j * erf((np.pi + 1j * angres ** 2 - phi) / (np.sqrt(2) * angres)) + + np.exp(2j * phi) * (erfi((angres ** 2 - 1j * phi) / (np.sqrt(2) * angres)) - + erfi((1j * np.pi + angres ** 2 - 1j * phi) / (np.sqrt(2) * angres))) + + erfi((angres ** 2 + 1j * phi) / (np.sqrt(2) * angres))) + / (2 * (erf(phi / (np.sqrt(2) * angres)) - erf((-np.pi + phi) / (np.sqrt(2) * angres))))) + + self._truncnorm_dist = truncnorm(-self._phi / self._angres, (np.pi - self._phi) / self._angres, 0, self._angres) + + def _pdf(self, arm, *args): + + return self._truncnorm_dist.pdf(arm) * np.sin(self._phi + arm) / self._norm + +class ARMMultiNormDist(rv_continuous): + + def __init__(self, phi, angres, angres_weights, *args, **kwargs): + """ + Describe the ARM distribution by a combination of multiple [truncated] gaussians + + Parameters + ---------- + phi + angres + angres_weights + args + kwargs + """ + + phi = _to_rad(phi) + angres = _to_rad(angres) + + super().__init__(0, *args, a=-phi, b= np.pi - phi, **kwargs) + + angres = np.atleast_1d(angres) + + weights = np.broadcast_to(angres_weights, angres.shape) + self._weights = weights / np.sum(weights) + self._dists = [ARMNormDist(phi, res) for res in angres] + + def _pdf(self, arm, *args): + + prob = np.zeros(np.shape(arm)) + + for w,dist in 
zip(self._weights,self._dists): + prob += w*dist._pdf(arm) + + return prob + + def _rvs(self, *args, size=None, random_state=None): + + if random_state is None: + random_state = self.random_state + + samples = np.empty(size) + + idx = random_state.choice(np.arange(len(self._dists)), size = size, p = self._weights) + + for i in range(len(self._dists)): + + dist = self._dists[i] + + mask = idx == i + + nmask = np.count_nonzero(mask) + + samples[mask] = dist._rvs(size = nmask) + + return samples + +class ThresholdKleinNishinaPolarScatteringAngleDist(KleinNishinaPolarScatteringAngleDist): + + def __init__(self, energy, energy_threshold=None, *args, **kwargs): + + super().__init__(energy) + + if energy_threshold is None: + self._renormalizable = True + self._renormalizable_error = None + self._min_phi = 0 + else: + + # Mathematica + # Solve[e/(e - edepmax) == 1 + \[Epsilon] (1 - (-1)), edepmax] + + max_energy_deposited = 2 * energy * self._eps / (1 + 2 * self._eps) + + if energy_threshold > max_energy_deposited: + self._renormalizable = False + self._renormalizable_error = ValueError( + f"Threshold ({energy_threshold}) is greater than the maximum possible deposited energy ({max_energy_deposited}). 
PDF cannot be normalized") + else: + self._renormalizable = True + self._renormalizable_error = None + + # Mathematica + # Solve[e/(e - ethresh) == + # 1 + \[Epsilon] (1 - Cos[\[Theta]]), \[Theta] ] + + energy_threshold = energy_threshold.to_value(energy.unit) + energy = energy.value + + eps_ediff = self._eps * (energy - energy_threshold) + + self._min_phi = np.arccos((eps_ediff - energy_threshold) / eps_ediff) + + # Renormalize + self._cdf_min_phi = None + self._norm_factor = None + + if self._renormalizable: + self._cdf_min_phi = super()._cdf(self._min_phi) + self._norm_factor = 1 / (1 - self._cdf_min_phi) + + def _renormalize(self, phi, prob): + + if np.isscalar(phi): + if phi < self._min_phi: + prob = 0 + else: + prob = np.asarray(prob) + phi = np.asarray(phi) + prob[phi < self._min_phi] = 0 + + prob *= self._norm_factor + + return prob + + def _pdf(self, phi, *args): + + if not self._renormalizable: + # While the PDF can't be normalized, + # and there we can't have a CDF or RVS, + # we can still return the probability = 0 + # to prevent other code from crashing + return np.zeros_like(phi) + + phi = _to_rad(phi) + + prob = super()._pdf(phi, *args) + + return self._renormalize(phi, prob) + + def _cdf(self, phi, *args): + + if not self._renormalizable: + raise self._renormalizable_error + + phi = _to_rad(phi) + + cum_prob = super()._cdf(phi, *args) - self._cdf_min_phi + + return self._renormalize(phi, cum_prob) + + def _rvs(self, *args, **kwargs): + if not self._renormalizable: + raise self._renormalizable_error + + return super()._rvs(*args, **kwargs) + +class MeasuredEnergyDist(rv_continuous): + + def __init__(self, energy, energy_res, phi, full_absorp_prob, *args, **kwargs): + """ + This is a *conditional* probability. We will assume the uncertainty on the measured angle phi is 0 + (all the CDS errors will come from the ARM distribution) + + If it is fully absorbed, then the deposited energy equal the initial energy. 
+ + If it escaped, then it will assume that the deposited energy corresponds to the energy of the first hit, + following the Compton equation + + The measured energy will be drawn from a normal distribution + centered at the deposited energy and std equal to energy_deposited*energy_res + + The geometry was not taking into account for the backscatter criterion since it was too complicated. + + Inputs and outputs are values assumed to be in the same units as input energy. + + Parameters + ---------- + energy: initial energy. + energy_res: function returning the energy resolution function of energy. Both input and output have energy units + phi: polar scattered angle + full_absorp_prob: probability of landing in the photopeak + args + kwargs + """ + + super().__init__(0, *args, a=0, **kwargs) + + if full_absorp_prob < 0 or full_absorp_prob > 1: + raise ValueError(f"full_absorp_prob must be between [0,1]. Got {full_absorp_prob}") + + eps = (energy / u.Quantity(510.99895069, u.keV)).value + + phi = _to_rad(phi) + energy_deposited = energy * (1 - 1 / (1 + eps * (1 - np.cos(phi)))) + + self._full_prob = full_absorp_prob + self._partial_prob = 1 - full_absorp_prob + + self._dist_full = norm(loc=energy.value, scale = energy_res(energy).to_value(energy.unit)) + self._dist_partial = norm(loc=energy_deposited.value, scale = energy_res(energy_deposited).to_value(energy.unit)) + + def _pdf(self, measured_energy, *args): + return self._full_prob * self._dist_full.pdf(measured_energy) + self._partial_prob * self._dist_partial.pdf(measured_energy) + + def _cdf(self, measured_energy, *args): + return self._full_prob * self._dist_full.cdf(measured_energy) + self._partial_prob * self._dist_partial.cdf(measured_energy) + + def _rvs(self, *args, size=None, random_state=None): + + full_absorp = uniform.rvs(size=size, random_state = random_state) < self._full_prob + + nfull = np.count_nonzero(full_absorp) + npartial = full_absorp.size - nfull + + samples = np.empty(full_absorp.shape) + + 
samples[full_absorp] = self._dist_full.rvs(*args, size=nfull, random_state=random_state) + samples[np.logical_not(full_absorp)] = self._dist_partial.rvs(*args, size=npartial, random_state=random_state) + + return samples + +class LogGaussianCosThetaEffectiveArea: + + def __init__(self, + max_area:Quantity, + max_area_energy:Quantity, + sigma_decades: float, + batch_size = 1000): + """ + The effective area is represented as a log-gaussian as function of energy and + a cos(theta) dependence as a function of the instrument colatitude theta. + =0 beyond theta = 90 deg + + Parameters + ---------- + max_area: maximum effective area + max_area_energy: energy where the effective area peaks + sigma_decades: + """ + + self._max_area = max_area + self._max_area_energy = max_area_energy.to_value(u.keV) + self._sigma_decades = sigma_decades + + self._batch_size = batch_size + + def __call__(self, photons = Iterable[PhotonWithDirectionAndEnergyInSCFrameInterface]) -> Iterable[Quantity]: + """ + """ + + for batch in itertools_batched(photons, self._batch_size): + + energy = [] + latitude = [] + + for photon in batch: + + energy.append(photon.energy_keV) + latitude.append(photon.direction_lat_radians) + + energy = np.asarray(energy) + latitude = np.asarray(latitude) + + area = self._max_area * np.exp(-np.log10(energy / self._max_area_energy) ** 2 / 2 / self._sigma_decades / self._sigma_decades) + + area *= np.sin(latitude) + area[latitude < 0] = 0 + + yield from area + +class ConstantFractEnergyRes: + + def __init__(self, energy_res): + """ + + Parameters + ---------- + energy_res: fraction + """ + + self._energy_res = energy_res + + def __call__(self, energy) -> Quantity: + """ + """ + + return self._energy_res * energy + +class ConstantAngularResolution: + + def __init__(self, angres, weights = None): + self._angres = np.atleast_1d(angres) + + if weights is None: + weights = np.ones(self._angres.size) + + self._weights = weights / np.sum(weights) + + def __call__(self, 
photons=Iterable[PhotonWithDirectionInSCFrameInterface]) -> Iterable[Quantity]: + for _ in photons: + yield self._angres, self._weights + +class ConstantTimesExponentialCutoffFullAbsorption: + + def __init__(self, base:float, cutoff_energy:Quantity, batch_size = 1000): + self._base = base + self._cutoff_energy = cutoff_energy.to_value(u.keV) + self._batch_size = batch_size + + def __call__(self, photons = Iterable[PhotonWithEnergyInterface]) -> Iterable[Quantity]: + """ + """ + + for batch in itertools_batched(photons, self._batch_size): + + energy = np.asarray([photon.energy_keV for photon in batch]) + + prob = self._base * np.exp(-energy / self._cutoff_energy) + + yield from prob + +class UnpolarizedIdealComptonIRF(FarFieldInstrumentResponseFunctionInterface): + + # The photon class and event class that the IRF implementation can handle + photon_type = PhotonWithDirectionAndEnergyInSCFrameInterface + event_type = EmCDSEventInSCFrameInterface + + def __init__(self, + effective_area:Callable[[Iterable[PhotonInterface]], Quantity], + energy_resolution:Callable[[Quantity], Quantity], + angular_resolution:Callable[[PhotonInterface], Tuple[Quantity, np.ndarray[float]]], + full_absorption_prob:Callable[[Iterable[PhotonInterface]], Quantity], + energy_threshold:Union[None, Quantity] = None + ): + + self._effective_area = effective_area + self._energy_resolution = energy_resolution + self._angular_resolution = angular_resolution + self._full_prob = full_absorption_prob + + if energy_threshold is None: + self.energy_threshold = 0*u.keV + else: + self._energy_threshold = energy_threshold + + self._pol_convention = StereographicConvention() + + @classmethod + def cosi_like(cls, + max_area = 110 * u.cm * u.cm, + max_area_energy = 1500 * u.keV, + sigma_decades = 0.4, + energy_resolution = 0.01, + angres = 3*u.deg, + angres_fact = [1 / 3., 1, 3, 9], + angres_weights = [1, 4, 10, 20], + full_absorption_constant = 0.5, + full_absorption_exp_cutoff = 10*u.MeV, + energy_threshold = 
20*u.keV): + """ + Similar performance as COSI. Meant for code development, not science or sensitivity predictions. + + Returns + ------- + + """ + + # This angres_fact give a FWHM approx = angres, but with long tails + max_area = 110 * u.cm * u.cm if max_area is None else max_area + max_area_energy = 1500 * u.keV if max_area_energy is None else max_area_energy + sigma_decades = 0.4 if sigma_decades is None else sigma_decades + energy_resolution = 0.01 if energy_resolution is None else energy_resolution + angres = 3 * u.deg if angres is None else angres + angres_fact = np.asarray([1/3.,1,3,9,27])/3 if angres_fact is None else angres_fact + angres_weights = np.asarray([1,4,5,20,30]) if angres_weights is None else angres_weights + full_absorption_constant = 0.7 if full_absorption_constant is None else full_absorption_constant + full_absorption_exp_cutoff = 10 * u.MeV if full_absorption_exp_cutoff is None else full_absorption_exp_cutoff + energy_threshold = 20 * u.keV if energy_threshold is None else energy_threshold + + angres_fact = np.asarray(angres_fact) + angres_weights = np.asarray(angres_weights) + + effective_area = LogGaussianCosThetaEffectiveArea(max_area, max_area_energy, sigma_decades) + energy_resolution = ConstantFractEnergyRes(energy_resolution) + angular_resolution = ConstantAngularResolution(angres * angres_fact, angres_weights) + full_absorption_prob = ConstantTimesExponentialCutoffFullAbsorption(full_absorption_constant, full_absorption_exp_cutoff) + + return cls(effective_area, + energy_resolution, + angular_resolution, + full_absorption_prob, + energy_threshold) + + def _az_prob(self, photon, phi, az): + return 1/2/np.pi + + def _random_az(self, photon, phi): + return 2*np.pi*uniform.rvs() + + def _event_probability(self, photon:PolDirESCPhoton, + phi:float, + events:Iterable[EmCDSEventInSCFrameInterface]) -> Iterable[float]: + """ + Computes the probability for a given set of photon parameters, and for all events with the same phi + + Note: it is 
assumed that all events have the same phi!!! + """ + + # Get some needed values from this query + photon_energy_keV = photon.energy_keV + photon_energy = Quantity(photon_energy_keV, u.keV, copy = False) + measured_energy_keV = np.asarray([event.energy_keV for event in events]) + full_absorp_prob = next(self._full_prob([photon])) + angres, weights = next(self._angular_resolution([photon])) + psichi_lon = [event.scattered_lon_rad_sc for event in events] + psichi_lat = [event.scattered_lat_rad_sc for event in events] + psichi = SkyCoord(lon = psichi_lon, lat = psichi_lat, unit = u.rad, frame = SpacecraftFrame()) + + # Convert CDF to relative + phi_geom, az = RelativeCDSCoordinates(photon.direction, self._pol_convention).to_relative(psichi) + + # Get probability + # We're assuming the phi measured from kinematics has no errors. Otherwise, the calculation became too complex + # All directional error come from the uncertainty on psichi (through the ARM, in psichi_geom) + # P(phi|Ei) * P(Em | Ei, phi) * P(psichi | phi, Ei, PA) + # P(psichi | phi, Ei, PA) = P(arm | phi) * P(az | phi, Ei) + + prob = ThresholdKleinNishinaPolarScatteringAngleDist(photon_energy, self._energy_threshold).pdf(phi) + prob *= MeasuredEnergyDist(photon_energy, self._energy_resolution, phi, full_absorp_prob).pdf(measured_energy_keV) + prob *= ARMMultiNormDist(phi, angres, weights).pdf(phi_geom.rad - phi) + prob *= self._az_prob(photon, phi, az.rad) + + return prob + + def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: + """ + Return the probability density of measuring a given event given a photon. + + The units of the output the inverse of the phase space of the class event_type data space. + e.g. if the event measured photon_energy in keV, the units of output of this function are implicitly 1/keV + + NOTE: this implementation runs fast if you sort the queries by photon, following by the event phi. 
+ """ + + # This allows to sample the PDF for multiple values at once + # Multiple event with the phi pretty much only happen during testing though, + # since for real data the same measured values will not be repeating + last_photon = None + last_phi = None + cached_events = [] + + for photon,event in query: + + phi = event.scattering_angle_rad + + if last_photon is None: + # This only happens for the first event + last_photon = photon + last_phi = phi + cached_events = [event] + continue + + if photon is last_photon: + # We can keep caching values, unless phi changed + + if last_phi is phi: + # Same photon and phi. Keep caching events + cached_events.append(event) + else: + # It's not longer the same. We now need to evaluate and yield what we have so far + yield from self._event_probability(last_photon, last_phi, cached_events) + + # Restart + last_photon = photon + last_phi = phi + cached_events = [event] + + else: + # It's not longer the same. We now need to evaluate and yield what we have so far + yield from self._event_probability(last_photon, last_phi, cached_events) + + # Restart + last_photon = photon + last_phi = phi + cached_events = [event] + + # Yield the probability for the leftover events + yield from self._event_probability(last_photon, last_phi, cached_events) + + def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEventInSCFrameInterface]: + """ + Return a stream of random events, photon by photon. 
+ """ + + for photon in photons: + + energy = photon.energy + full_absorp_prob = next(self._full_prob([photon])) + + # Random polar (phi) and azimuthal angle from Klein Nishina + phi = ThresholdKleinNishinaPolarScatteringAngleDist(energy, self._energy_threshold).rvs() + azimuth = self._random_az(photon, phi) + + # Get the measured energy based on phi and the energy resolution and absroption probabity for the photon location + measured_energy = MeasuredEnergyDist(energy, self._energy_resolution, phi, full_absorp_prob).rvs() + measured_energy_keV = Quantity(measured_energy, energy.unit, copy=False).to_value(u.keV) + + # Get a random ARM + angres, weights = next(self._angular_resolution([photon])) + arm = ARMMultiNormDist(phi, angres, weights).rvs() + + # Transform arm and az to psichi + psichi = RelativeCDSCoordinates(photon.direction, self._pol_convention).to_cds(phi + arm, azimuth) + + # Put everything in the output event + # The assummed probability assumes that phi is measured exactly, all the uncertainty comes from the error + # in psichi (through the ARM) + yield EmCDSEventInSCFrame(measured_energy_keV, phi, psichi.lon.rad, psichi.lat.rad) + + + def effective_area_cm2(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[float]: + """ + + """ + return [a.to_value(u.cm*u.cm) for a in self._effective_area(photons)] + + +class IdealComptonIRF(UnpolarizedIdealComptonIRF): + + photon_type = PolDirESCPhoton + + def _az_prob(self, photon, phi, az): + pa = photon.polarization_angle_rad + return KleinNishinaAzimuthalScatteringAngleDist(photon.energy, phi).pdf((az - pa) % (2 * np.pi)) + + def _random_az(self, photon, phi): + pa = photon.polarization_angle_rad + return KleinNishinaAzimuthalScatteringAngleDist(photon.energy, phi).rvs() + pa + +class RandomEventDataFromLineInSCFrame(EmCDSEventDataInSCFrameInterface): + + def __init__(self, + irf:FarFieldInstrumentResponseFunctionInterface, + flux:Quantity, + duration:Quantity, + energy:Quantity, + direction:SkyCoord, + 
polarized_irf:FarFieldInstrumentResponseFunctionInterface, + polarization_degree:float = None, + polarization_angle:Union[Angle, Quantity] = None, + polarization_convention:PolarizationConvention = None): + """ + + Parameters + ---------- + irf: Must handle PhotonWithDirectionAndEnergyInSCFrameInterface + flux: Source flux in unit of 1/area/time + duration: Integration time + energy: Source energy (a line) + direction: Source direction (in SC coordinates) + polarized_irf: Must handle PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface + polarization_degree + polarization_angle + polarization_convention + """ + + unpolarized_irf = irf + + self.event_type = unpolarized_irf.event_type + + flux_cm2_s = flux.to_value(1/u.cm/u.cm/u.s) + duration_s = duration.to_value(u.s) + + energy_keV = energy.to_value(u.keV) + direction = direction.transform_to('spacecraftframe') + source_direction_lon_rad = direction.lon.rad + source_direction_lat_rad = direction.lat.rad + + unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(source_direction_lon_rad, + source_direction_lat_rad, + energy_keV) + + unpolarized_expected_counts = next(iter(irf.effective_area_cm2([unpolarized_photon]))) * flux_cm2_s * duration_s + + if polarization_degree is None: + polarization_degree = 0 + + if polarization_degree < 0 or polarization_degree > 1: + raise ValueError(f"polarization_degree must lie between 0 and 1. Got {polarization_degree}") + + if polarization_degree == 0: + polarized_irf = None + polarized_expected_counts = 0 + + else: + + polarized_irf = polarized_irf + + if polarized_irf.event_type is not unpolarized_irf.event_type: + raise TypeError(f"Both IRF need to have the same event type. 
Got {unpolarized_irf.event_type} and {polarized_irf.event_type}") + + polarization_angle_rad = PolarizationAngle(polarization_angle, direction, polarization_convention).transform_to('stereographic').angle.rad + + polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention(source_direction_lon_rad, + source_direction_lat_rad, + energy_keV, + polarization_angle_rad) + + unpolarized_expected_counts *= (1 - polarization_degree) + polarized_expected_counts = polarization_degree * next(iter(polarized_irf.effective_area_cm2([polarized_photon]))) * flux_cm2_s * duration_s + + unpolarized_counts = poisson(unpolarized_expected_counts).rvs() + polarized_counts = poisson(polarized_expected_counts).rvs() + + self._events = [] + + unpolarized_events = iter(unpolarized_irf.random_events(itertools.repeat(unpolarized_photon, unpolarized_counts))) + + polarized_events = None + if polarized_counts > 0: + polarized_events = iter(polarized_irf.random_events(itertools.repeat(polarized_photon, polarized_counts))) + + nthrown_unpolarized = 0 + nthrown_polarized = 0 + + while nthrown_unpolarized < unpolarized_counts or nthrown_polarized < polarized_counts: + + if np.random.uniform() < polarization_degree: + # Polarized component + if nthrown_polarized < polarized_counts: + self._events.append(next(polarized_events)) + nthrown_polarized += 1 + else: + # Unpolarized component + if nthrown_unpolarized < unpolarized_counts: + self._events.append(next(unpolarized_events)) + nthrown_unpolarized += 1 + + def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: + """ + Return one Event at a time + """ + yield from self._events + + @property + def nevents(self) -> int: + return len(self._events) + +class ExpectationFromLineInSCFrame(ExpectationDensityInterface): + + def __init__(self, + data:EmCDSEventDataInSCFrameInterface, + irf:FarFieldInstrumentResponseFunctionInterface, + flux:Quantity, + duration:Quantity, + energy:Quantity, + direction:SkyCoord, + 
polarized_irf:FarFieldInstrumentResponseFunctionInterface, + polarization_degree:float = None, + polarization_angle:Union[Angle, Quantity] = None, + polarization_convention:PolarizationConvention = None): + + self._unpolarized_irf = irf + self._polarized_irf = polarized_irf + + self._duration_s = duration.to_value(u.s) + self._data = data + + self._flux_cm2_s = None + self._energy_keV = None + self._direction = None + self._source_direction_lon_rad = None + self._source_direction_lat_rad = None + self._polarization_degree = None + self._polarization_angle_rad = None + self._polarization_convention = None + self._unpolarized_photon = None + self._polarized_photon = None + self.set_model(flux = flux, + energy= energy, + direction=direction, + polarization_degree=polarization_degree, + polarization_angle=polarization_angle, + polarization_convention = polarization_convention) + + # Cache + self._cached_energy_keV = None + self._cached_direction = None + self._cached_pol_angle_rad = None + self._cached_pol_degree = None + self._cached_diff_aeff = None # Per flux unit + self._cached_event_probability = None + self._cached_event_probability_unpolarized = None + self._cached_event_probability_polarized = None + + def set_model(self, + flux:Quantity = None, + energy:Quantity = None, + direction:SkyCoord = None, + polarization_degree: float = None, + polarization_angle: Union[Angle, Quantity] = None, + polarization_convention: PolarizationConvention = None + ): + """ + Parameters not set default to current values + """ + + if flux is not None: + self._flux_cm2_s = flux.to_value(1 / u.cm / u.cm / u.s) + + if energy is not None: + self._energy_keV = energy.to_value(u.keV) + + if direction is not None: + direction = direction.transform_to('spacecraftframe') + self._direction = direction + self._source_direction_lon_rad = direction.lon.rad + self._source_direction_lat_rad = direction.lat.rad + + if polarization_degree is not None: + self._polarization_degree = 
polarization_degree + + if self._polarization_degree is None: + self._polarization_degree = 0 + + if self._polarization_degree < 0 or self._polarization_degree > 1: + raise ValueError(f"polarization_degree must lie between 0 and 1. Got {self._polarization_degree}") + + if self._polarization_degree > 0: + + if self._polarized_irf is None: + raise ValueError("Polarization degree >0 but polarized IRF is None") + + if polarization_convention is not None: + self._polarization_convention = polarization_convention + + if polarization_angle is not None: + self._polarization_angle_rad = PolarizationAngle(polarization_angle, self._direction, + self._polarization_convention).transform_to('stereographic').angle.rad + + self._unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(self._source_direction_lon_rad, + self._source_direction_lat_rad, + self._energy_keV) + + self._polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention( + self._source_direction_lon_rad, + self._source_direction_lat_rad, + self._energy_keV, + self._polarization_angle_rad) + + def _update_cache(self): + + if (self._cached_energy_keV is None + or self._energy_keV != self._cached_energy_keV + or self._direction != self._cached_direction + or self._polarization_angle_rad != self._cached_pol_angle_rad + or self._polarization_degree != self._cached_pol_degree): + #Either it's the first time or the energy changed + + unpolarized_diff_aeff = (1 - self._polarization_degree) * next( + iter(self._unpolarized_irf.effective_area_cm2([self._unpolarized_photon]))) + + if (self._cached_event_probability_unpolarized is None + or self._energy_keV != self._cached_energy_keV + or self._direction != self._cached_direction): + # Energy or direction can affect the unpolarized response, but not PA nor PD + self._cached_event_probability_unpolarized = np.fromiter(self._unpolarized_irf.event_probability([(self._unpolarized_photon, e) for e in self._data]),dtype=float) + + if 
self._polarization_degree > 0: + + polarized_diff_aeff = self._polarization_degree * next(iter(self._polarized_irf.effective_area_cm2([self._polarized_photon]))) + + self._cached_diff_aeff = unpolarized_diff_aeff + polarized_diff_aeff + + if (self._cached_event_probability_polarized is None + or self._energy_keV != self._cached_energy_keV + or self._direction != self._cached_direction + or self._polarization_angle_rad != self._cached_pol_angle_rad): + # Energy, direction or PA can affect the unpolarized response, but not PD + self._cached_event_probability_polarized = np.fromiter(self._polarized_irf.event_probability([(self._polarized_photon, e) for e in self._data]), dtype=float) + + self._cached_event_probability = ( 1 - self._polarization_degree) * self._cached_event_probability_unpolarized + self._polarization_degree * self._cached_event_probability_polarized + + else: + + self._cached_diff_aeff = unpolarized_diff_aeff + + self._cached_event_probability = self._cached_event_probability_unpolarized + + self._cached_energy_keV = self._energy_keV + self._cached_direction = self._direction + self._cached_pol_angle_rad = self._polarization_angle_rad + self._cached_pol_degree = self._polarization_degree + + def expected_counts(self) -> float: + + self._update_cache() + + return self._cached_diff_aeff * (self._flux_cm2_s * self._duration_s) + + def event_probability(self) -> Iterable[float]: + + self._update_cache() + + yield from self._cached_event_probability + diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py new file mode 100644 index 00000000..b69ba178 --- /dev/null +++ b/cosipy/response/instrument_response.py @@ -0,0 +1,291 @@ +from typing import Union + +import numpy as np +from astropy.coordinates import SkyCoord +import astropy.units as u +from astropy.units import Quantity +from scoords import Attitude, SpacecraftFrame + +from cosipy.data_io import EmCDSBinnedData +from cosipy.interfaces import BinnedDataInterface 
+from cosipy.interfaces.instrument_response_interface import BinnedInstrumentResponseInterface + +from cosipy.polarization import PolarizationAngle, PolarizationAxis +from cosipy.response import FullDetectorResponse + +from histpy import Axes, Histogram + + +__all__ = ["BinnedInstrumentResponse"] + +class BinnedInstrumentResponse(BinnedInstrumentResponseInterface): + + def __init__(self, response:FullDetectorResponse): + + self._dr = response + + @property + def is_polarization_response(self): + return 'Pol' in self._dr.axes.labels + + def differential_effective_area(self, + data: BinnedDataInterface, + direction: SkyCoord, + energy:u.Quantity, + polarization:PolarizationAngle = None, + attitude:Attitude = None, + weight:Union[Quantity, float] = None, + out:Quantity = None, + add_inplace:bool = False) -> Quantity: + """ + Interpolations and bin coupling: + * The direction is always bi-linearly interpolated. + * Ei, Em and Phi always needs to match the response exactly + * If PsiChi is in local coordinates, PsiChi and polarization need to match the response exactly + * If PsiChi is in inertial coordinates, PsiChi and polarization are interpolated at 0-th order during the rotation + + Parameters + ---------- + data + Binned measurements. We can only handle EmCDSBinnedData + direction: + Photon incoming direction in SC coordinates + energy: + Photon energy + polarization + Photon polarization angle + attitude + Attitude defining the orientation of the SC in an inertial coordinate system. + weight + Optional. Weighting the result by a given weight. Providing the weight at this point as opposed to + apply it to the output can result in greater efficiency. + out: + Optional. Histogram to store the output. If possible, the implementation should try to avoid allocating + new memory. + add_inplace + If True and a Histogram output was provided, we will try to avoid allocating new + memory and add --not set-- the result of this operation to the output. 
+ + Returns + ------- + The effective area times the event measurement probability distribution integrated on each of the bins + of the provided axes + """ + + # Check if we're getting the expected axes and other limitations + if not isinstance(data, EmCDSBinnedData): + raise TypeError(f"Wrong data type '{type(data)}', expected {EmCDSBinnedData}.") + + axes = data.axes + + if set(axes.labels) != {'Em','PsiChi','Phi'}: + raise ValueError(f"Unexpected axes labels. Expecting \"{{'Em','PsiChi','Phi'}}\", got {axes.labels}") + + if self._dr.measurement_axes["Em"] != axes["Em"]: + # Matches the v0.3 behaviour + raise ValueError("This implementation can only handle a fixed measured energy (Em) binning equal to the underlying response file.") + + if self._dr.measurement_axes["Phi"] != axes["Phi"]: + # Matches the v0.3 behaviour + raise ValueError("This implementation can only handle a fixed scattering angle (Phi) binning equal to the underlying response file.") + + if not np.array_equal(energy, self._dr.axes['Ei'].centers): + # Matches the v0.3 behaviour + raise RuntimeError("Currently, the probed energy values need to match the underlying response matrix Ei centers.") + + results_axes_labels = ['Ei'] + + if polarization is not None: + if not self.is_polarization_response: + raise RuntimeError("The FullDetectorResponse does not contain polarization information") + + if axes["PsiChi"].coordsys is None: + raise ValueError("PsiChi axes doesn't have a coordinate system") + + if direction.shape != (): + raise ValueError("Currently this implementation can only deal with one direction at a time") + + # Fork for local and galactic PsiChi coordinates + if not isinstance(axes["PsiChi"].coordsys, SpacecraftFrame): + # Is inertial + if attitude is None: + raise RuntimeError("User need to provide the attitude information in order to transform to spacecraft coordinates") + + return self._differential_effective_area_inertial(attitude, axes, direction, polarization, weight, out, 
add_inplace) + + # Is local + + # Check again remaining axes + if self._dr.measurement_axes["PsiChi"] != axes["PsiChi"]: + # Matches the v0.3 behaviour + raise ValueError("This implementation can only handle a fixed scattering direction (PsiChi) binning equal to the underlying response file.") + + if polarization is not None: + if not np.array_equal(polarization, self._dr.axes['Pol'].centers): + # Matches the v0.3 behaviour + raise RuntimeError( + "Currently, the probed polarization angles need to match the underlying response matrix Pol centers.") + + # Get the pixel as is since we already checked that the requested + # energy and polarization points match the underlying response centers + # Matches the v0.3 behaviour + pix = self._dr.ang2pix(direction) + + # TODO: Update after Pr364. get_pixel(pix, weight) should make this more efficient + if weight is not None: + result = self._dr[pix] * weight + else: + result = self._dr[pix] + + # Fix order of output axes to the standard by the interface + results_axes_labels = ['Ei'] + + if polarization is not None: + results_axes_labels += ['Pol'] + + results_axes_labels += list(axes.labels) + + result = result.project(results_axes_labels) + + if polarization is None and self.is_polarization_response: + # It was implicitly converted to unpolarized response by the + # projection above, but this is still needed to get the mean + result /= self._dr.axes.nbins + + return self._fill_out_and_return(result, out, add_inplace) + + @staticmethod + def _fill_out_and_return(result:Histogram, out:Quantity, add_inplace:bool = False) -> Quantity: + + if out is None: + # Convert to base class + return result.contents + else: + + if out.shape != result.shape: + raise ValueError("The provided out argument doesn't have the right shape." 
+                             f"Expected {result.shape}, got {out.shape}")
+
+            if add_inplace:
+                out += result.contents
+            else:
+                out[:] = result.contents
+
+            return out
+
+    def _differential_effective_area_inertial(self,
+                                              attitude:Attitude,
+                                              axes:Axes,
+                                              direction: SkyCoord,
+                                              polarization:PolarizationAngle = None,
+                                              weight:Union[float, Quantity] = None,
+                                              out: Quantity = None,
+                                              add_inplace:bool = False,
+                                              ) -> Quantity:
+        """
+        Will rotate PsiChi from local to inertial coordinates
+
+        Parameters
+        ----------
+        attitude
+        axes
+        direction
+        polarization
+        weight
+        out
+        add_inplace
+
+        Returns
+        -------
+        Quantity
+            The response contents integrated on the provided axes bins.
+        """
+
+        # Generate axes that will allow us to use _sum_rot_hist,
+        # and obtain the same results as in v3.x
+        out_axes = [self._dr.axes['Ei']]
+
+
+
+        if self.is_polarization_response:
+
+            raise RuntimeError("Fix me. No pol yet")
+
+            # NOTE: everything below is unreachable until the raise above is removed.
+            # Since we're doing a 0-th order interpolation, the only thing that matter are the bin centers,
+            # so we're placing them at the input polarization angles
+
+            if np.any(polarization.angle[1:] - polarization.angle[:-1] < 0):
+                raise ValueError("This implementation requires strictly monotonically increasing polarization angles")
+
+            pol_edges = (polarization.angle[:-1] + polarization.angle[1:])/2
+
+            # np.concatenate takes a *sequence* of arrays as its first argument;
+            # passing two arrays positionally would feed the second one to `axis`.
+            pol_edges = np.concatenate(([pol_edges[0] - 2*(pol_edges[0] - polarization.angle[0])], pol_edges))
+            pol_edges = np.concatenate((pol_edges, [pol_edges[-1] + 2 * (polarization.angle[-1] - pol_edges[-1])]))
+
+            out_axes += [PolarizationAxis(pol_edges, convention = polarization.convention)]
+
+        out_axes += list(axes)
+        out_axes = Axes(out_axes)
+
+        if weight is None:
+            # Weight takes the role of the exposure in _sum_rot_hist, which is not an optional argument
+            weight = 1
+
+        # Almost copy-paste from FullDetectorResponse.get_point_source_response(). Improve to avoid duplicated code
+        def rotate_coords(c, rot):
+            """
+            Apply a rotation matrix to one or more 3D directions
+            represented as Cartesian 3-vectors. 
Return rotated directions
+            in polar form as a pair (co-latitude, longitude) in
+            radians.
+
+            """
+            c_local = rot @ c
+
+            c_x, c_y, c_z = c_local
+
+            theta = np.arctan2(c_y, c_x)
+            phi = np.arccos(c_z)
+
+            return (phi, theta)
+
+        rot = attitude.transform_to('icrs').rot.inv().as_matrix()
+
+        src_cart = direction.transform_to('icrs').cartesian.xyz.value
+        loc_src_colat, loc_src_lon = rotate_coords(src_cart, rot)
+        loc_src_pixels = self._dr._axes['NuLambda'].find_bin(theta=loc_src_colat,
+                                                             phi=loc_src_lon)
+
+        sf_psichi_axis = axes['PsiChi']
+        sf_psichi_dirs = sf_psichi_axis.pix2skycoord(np.arange(sf_psichi_axis.nbins))
+        sf_psichi_dirs_cart = sf_psichi_dirs.transform_to('icrs').cartesian.xyz.value
+        loc_psichi_colat, loc_psichi_lon = rotate_coords(sf_psichi_dirs_cart, rot)
+        loc_psichi_pixels = self._dr._axes['PsiChi'].find_bin(theta=loc_psichi_colat,
+                                                              phi=loc_psichi_lon)
+
+
+        # Either initialize a new or clear cache
+        if out is None:
+            # Fix: `dr_pix` was never defined in this method (NameError on this
+            # path). Use the unit of the underlying response histogram, which is
+            # what _rot_psr accumulates — TODO(review): confirm against _rot_psr.
+            out = Quantity(np.zeros(out_axes.shape), self._dr.unit)
+        else:
+            if not add_inplace:
+                out[:] = 0
+
+        if isinstance(weight, u.Quantity):
+            weight_unit = weight.unit
+            weight = weight.value
+        else:
+            weight_unit = None
+
+        out.value[:] += self._dr._rot_psr(out_axes, weight,
+                                          loc_psichi_pixels,
+                                          (loc_src_pixels,))
+
+        if weight_unit is not None:
+            out = u.Quantity(out.value, weight_unit*out.unit, copy = False)
+
+        return out
+
+
+
+
+
+
+
diff --git a/cosipy/response/instrument_response_function.py b/cosipy/response/instrument_response_function.py
new file mode 100644
index 00000000..8aee900e
--- /dev/null
+++ b/cosipy/response/instrument_response_function.py
@@ -0,0 +1,98 @@
+import itertools
+from typing import Iterable, Tuple
+
+import numpy as np
+from astropy.coordinates import SkyCoord
+
+from astropy import units as u
+from astropy.units import Quantity
+
+from histpy import Histogram
+from scoords import SpacecraftFrame
+
+from cosipy.interfaces import EventInterface
+from cosipy.interfaces.event import 
TimeTagEmCDSEventInSCFrameInterface, EmCDSEventInSCFrameInterface +from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface +from cosipy.interfaces.photon_list import PhotonListWithDirectionInterface +from cosipy.interfaces.photon_parameters import PhotonInterface, PhotonWithDirectionAndEnergyInSCFrameInterface +from cosipy.response import FullDetectorResponse +from cosipy.util.iterables import itertools_batched + + +class UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction(FarFieldInstrumentResponseFunctionInterface): + + photon_type = PhotonWithDirectionAndEnergyInSCFrameInterface + event_type = EmCDSEventInSCFrameInterface + + def __init__(self, response: FullDetectorResponse, + batch_size = 100000): + + # Get the differential effective area, which is still integrated on each bin at this point + # FarFieldInstrumentResponseFunctionInterface uses cm2 + # First convert and then drop the units + self._diff_area = response.to_dr().project('NuLambda', 'Ei', 'Em', 'Phi', 'PsiChi').to(u.cm * u.cm, copy=False).to(None, copy = False, update = False) + + # Now fix units for the axes + # PhotonWithDirectionAndEnergyInSCFrameInterface has energy in keV + # EmCDSEventInSCFrameInterface has energy in keV, phi in rad + # NuLambda and PsiChi don't have units since these are HealpixAxis. 
They take SkyCoords + # Copy the axes the first time since they are shared with the response:FullDetectorResponse input + self._diff_area.axes['Ei'] = self._diff_area.axes['Ei'].to(u.keV).to(None, copy = False, update = False) + self._diff_area.axes['Em'] = self._diff_area.axes['Em'].to(u.keV).to(None, copy = False, update = False) + self._diff_area.axes['Phi'] = self._diff_area.axes['Phi'].to(u.rad).to(None, copy = False, update = False) + + # Integrate to get the total effective area + self._area = self._diff_area.project('NuLambda', 'Ei') + + # Now make it differential by dividing by the phasespace + # EmCDSEventInSCFrameInterface energy and phi units have already been taken + # care off. Only PsiChi remains, which is a direction in the sphere, therefore per steradians + energy_phase_space = self._diff_area.axes['Ei'].widths + phi_phase_space = self._diff_area.axes['Phi'].widths + psichi_phase_space = self._diff_area.axes['PsiChi'].pixarea().to_value(u.sr) + + self._diff_area /= self._diff_area.axes.expand_dims(energy_phase_space, 'Em') + self._diff_area /= self._diff_area.axes.expand_dims(phi_phase_space, 'Phi') + self._diff_area /= psichi_phase_space + + self._batch_size = batch_size + + def effective_area_cm2(self, photons: Iterable[PhotonWithDirectionAndEnergyInSCFrameInterface]) -> Iterable[float]: + """ + + """ + + for photon_chunk in itertools_batched(photons, self._batch_size): + + lon, lat, energy_keV = np.asarray([[photon.direction_lon_radians, + photon.direction_lat_radians, + photon.energy_keV] for photon in photon_chunk], dtype=float).transpose() + + direction = SkyCoord(lon, lat, unit = u.rad, frame = SpacecraftFrame()) + + for area_eff in self._area.interp(direction, energy_keV): + yield area_eff + + def differential_effective_area_cm2(self, query: Iterable[Tuple[PhotonWithDirectionAndEnergyInSCFrameInterface, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: + """ + Return the differential effective area (probability density of measuring a 
given event given a photon times the effective area) + """ + + for query_chunk in itertools_batched(query, self._batch_size): + + # Psi is colatitude (complementary angle) + lon_ph, lat_ph, energy_i_keV, energy_m_keV, phi_rad, psi_comp, chi = \ + np.asarray([[photon.direction_lon_radians, + photon.direction_lat_radians, + photon.energy_keV, + event.energy_keV, + event.scattering_angle_rad, + event.scattered_lat_rad_sc, + event.scattered_lon_rad_sc, + ] for photon,event in query_chunk], dtype=float).transpose() + + direction_ph = SkyCoord(lon_ph, lat_ph, unit = u.rad, frame = SpacecraftFrame()) + psichi = SkyCoord(chi, psi_comp, unit=u.rad, frame=SpacecraftFrame()) + + for diff_area in self._diff_area.interp(direction_ph, energy_i_keV, energy_m_keV, phi_rad, psichi): + yield diff_area \ No newline at end of file diff --git a/cosipy/response/photon_types.py b/cosipy/response/photon_types.py new file mode 100644 index 00000000..61d70a33 --- /dev/null +++ b/cosipy/response/photon_types.py @@ -0,0 +1,51 @@ +from astropy.coordinates import SkyCoord +from scoords import SpacecraftFrame + +from cosipy.interfaces.photon_parameters import PhotonWithDirectionAndEnergyInSCFrameInterface, \ + PolarizedPhotonStereographicConventionInSCInterface, \ + PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface, PhotonWithEnergyInterface +from cosipy.polarization import PolarizationAngle + +from astropy import units as u + +class PhotonWithEnergy(PhotonWithEnergyInterface): + + def __init__(self, energy_keV): + self._energy = energy_keV + + @property + def energy_keV(self) -> float: + return self._energy + +class PhotonWithDirectionAndEnergyInSCFrame(PhotonWithEnergy, PhotonWithDirectionAndEnergyInSCFrameInterface): + + frame = SpacecraftFrame() + + def __init__(self, direction_lon_radians, direction_lat_radians, energy_keV): + + super().__init__(energy_keV) + + self._lon = direction_lon_radians + self._lat = direction_lat_radians + + @property + def 
direction_lon_radians(self) -> float: + return self._lon + + @property + def direction_lat_radians(self) -> float: + return self._lat + +class PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention(PhotonWithDirectionAndEnergyInSCFrame, PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface): + + def __init__(self, direction_lon_radians, direction_lat_radians, energy_keV, polarization_angle_radians): + + super().__init__(direction_lon_radians, direction_lat_radians, energy_keV) + + self._pa = polarization_angle_radians + + @property + def polarization_angle_rad(self) -> float: + return self._pa + + diff --git a/cosipy/response/relative_coordinates.py b/cosipy/response/relative_coordinates.py new file mode 100644 index 00000000..81ed4a56 --- /dev/null +++ b/cosipy/response/relative_coordinates.py @@ -0,0 +1,271 @@ +from typing import Union + +import numpy as np +from astropy.coordinates import SkyCoord, Angle +from astropy.units import Quantity +from cosipy.polarization import PolarizationConvention, StereographicConvention + +from astropy import units as u + +class RelativeCDSCoordinates: + + def __init__(self, + source_direction:Union[SkyCoord, np.ndarray[float]], + pol_convention:PolarizationConvention): + """ + Size N + + Parameters + ---------- + source_direction: SkyCoord or normalized vector (3,N) + pol_convention + """ + + if isinstance(source_direction, SkyCoord): + + # Convert to convention frame + self._frame = pol_convention.frame + self._representation_type = source_direction.representation_type + source_direction = source_direction.transform_to(self._frame) + self._source_vec = self._standardize_vector(source_direction) + + else: + + # Assume it's already in the convention frame + self._frame = None + self._representation_type = None + self._source_vec = source_direction + + self._px, self._py = pol_convention.get_basis_local(source_direction) + + @staticmethod + def _standardize_angle(angle): + if 
isinstance(angle, (Quantity, Angle)): + angle = angle.to_value(u.rad) + + return np.asarray(angle) + + @staticmethod + def _standardize_vector(direction): + if isinstance(direction, SkyCoord): + direction = direction.cartesian.xyz + + return np.asarray(direction) + + def to_cds(self, phi, az): + """ + From coordinate relative to the source direction to the gamma-ray scattered direction. + + Parameters + ---------- + phi: + Angular distance with respect to the source direction. Can have shape (N,) or (N,M). + az: + Azimuthal angle around the source direction, with a 0-direction defined by the + polarization convention. Same size as phi or broadcastable. + + Returns + ------- + The scattered direction + Shape: + If working with pure vectors: (3, N, M) (or broadcastable, e.g. (3,1,M) + If working with SkyCoord: (N, M) + """ + + # 1. Convert to a numpy array of radians + # 2. Add axis to broadcast with x,y,z coordinates + phi = self._standardize_angle(phi) + az = self._standardize_angle(az) + + # Get the right shape for broadcasting + phi,az = np.broadcast_arrays(phi, az) + phi = phi[np.newaxis] + az = az[np.newaxis] + new_dims = tuple(range(self._source_vec.ndim, phi.ndim)) + source_vec = np.expand_dims(self._source_vec, new_dims) + px = np.expand_dims(self._px, new_dims) + py = np.expand_dims(self._py, new_dims) + + # Sum over each basis vector, without allocating multiple arrays + psichi_vec = px * np.cos(az) + psichi_vec += py * np.sin(az) + psichi_vec *= np.sin(phi) + psichi_vec += source_vec * np.cos(phi) + + + # Convert to skycoord if needed + if self._frame is not None: + + psichi = SkyCoord(*psichi_vec, + representation_type='cartesian', + frame=self._frame) + + psichi.representation_type = self._representation_type + + return psichi + + else: + + return psichi_vec + + def to_relative(self, psichi:Union[SkyCoord, np.ndarray[float]]): + """ + From the absolute scattered direction, to the coordinates relative + to the source direction. 
+ + Parameters + ---------- + psichi: + Scattered direction + Can have shape: + - Vector: (3,N) or (3,N,M) (or broadcastable, e.g. (3,1,M) + - Skycoord: (N,) or (N,M). + + Returns + ------- + phi,az: + phi: Angular distance with respect to the source direction. + az: Azimuthal angle around the source direction, with a 0-direction defined by the + polarization convention. + Each with shape (N,M). Angles. + """ + + psichi_vec = self._standardize_vector(psichi) + + # Adjust dimensions for broadcasting + new_dims = tuple(range(self._source_vec.ndim, psichi_vec.ndim)) + source_vec = np.expand_dims(self._source_vec, new_dims) + px = np.expand_dims(self._px, new_dims) + py = np.expand_dims(self._py, new_dims) + + # Get the psichi_perp_vec component along each basis vector + # This is equivalent to + # psichi_px_component = np.sum(px * psichi_perp_vec, axis=0) + # for each component + # but it does not allocate the temporary px*psichi_perp_vec results + # and performs the full operation in one step + psichi_px_component, psichi_py_component, psichi_source_component = \ + np.einsum('ji...,ji...->j...',[px,py,source_vec], psichi_vec[np.newaxis]) + + # Get the angle from the vector + phi = np.arccos(psichi_source_component) + az = np.arctan2(psichi_py_component, psichi_px_component) + + return Angle(phi, unit=u.rad, copy=False), Angle(az, unit=u.rad, copy=False) + + @staticmethod + def get_relative_cds_phase_space(phi_min = None, phi_max = None, arm_min = None, arm_max = None, az_min = None, az_max = None): + """ + The CDS is described by: + phi: the polar scattering angle + psichi: the direction of the scattered gamma + + Given a source direction, psichi can be parametrized with + - arm equals the minimum angular distance between the psichi and a cone centered at the source direction + with hald-opening angle equal to phi + - az: the azimuthal angle around the source direction + + The total phase space of psichi is that of the sphere. 
If psi is the colatitude and chi the longitude, then + dV = sin(psi) dphi dpsi dchi + + The total phase space is pi (from phi) time 4*pi (from psichi, that is the sphere area) + + In the reparametrization, this is + dV = sin(phi + arm) dphi darm daz + + While the total phase space remains unchanged, in order to integrate this volume in arbitrary limits + you need take into account the fact that phi+arm range is limited to [0,pi]. + + This function performs such integration by checking all possible integration limit cases. + + Parameters + ---------- + phi_min: Defaults to 0 + phi_max: default to pi + arm_min: default to -pi + arm_max: default to pi + az_min: default to 0 + az_max: default to 2*pi + + Returns + ------- + Phase space + """ + + if phi_min is None: + phi_min = 0 + + if phi_max is None: + phi_max = np.pi + + if arm_min is None: + arm_min = -np.pi + + if arm_max is None: + arm_max = np.pi + + if az_min is None: + az_min = 0 + + if az_max is None: + az_max = 2*np.pi + + phi_min = RelativeCDSCoordinates._standardize_angle(phi_min) + phi_max = RelativeCDSCoordinates._standardize_angle(phi_max) + arm_min = RelativeCDSCoordinates._standardize_angle(arm_min) + arm_max = RelativeCDSCoordinates._standardize_angle(arm_max) + az_min = RelativeCDSCoordinates._standardize_angle(az_min) + az_max = RelativeCDSCoordinates._standardize_angle(az_max) + + phi_min, phi_max, arm_min, arm_max, az_min, az_max = np.broadcast_arrays(phi_min, phi_max, arm_min, arm_max, az_min, az_max) + + # Handle cases in between the physical boundaries + # Integrate excluding unphysical corners + # Remove unphysical rectangles + arm_min = np.choose((arm_min < -phi_max) & (-phi_max < arm_max), [arm_min, -phi_max]) + arm_max = np.choose((arm_min < np.pi - phi_min) & (np.pi - phi_min < arm_max), [arm_max, np.pi - phi_min]) + + phi_min = np.choose((phi_min < -arm_max) & (-arm_max < phi_max), [phi_min, -arm_max]) + phi_max = np.choose((phi_min < np.pi - arm_min) & (np.pi - arm_min < phi_max), 
[phi_max, np.pi - arm_min]) + + integral_rect = (az_max - az_min) * ( + -np.sin(arm_min + phi_min) + np.sin(arm_max + phi_min) + np.sin(arm_min + phi_max) - np.sin(arm_max + phi_max)) + + # Remove unphysical corners (triangles or trapezoids) + # Note the (phi1 + arm1) and (phi2 + arm2) masks in front + + # Lower left corner (low phi, low arm) + # Integrate[Sin[phi+arm],{phi,phi1,phi2},{arm,arm1, -phi}]//FullSimplify + phil = np.maximum(-arm_max, phi_min) + phih = np.minimum(-arm_min, phi_max) + unphys_lowerleft_integral = -phih + phil + np.sin(arm_min + phih) - np.sin(arm_min + phil) + unphys_lowerleft_integral *= (phil + arm_min < 0) + integral = integral_rect - (az_max - az_min) * unphys_lowerleft_integral + + # Upper right corner (high phi, high arm) + # Integrate[Sin[phi+arm],{phi,phi1,phi2}, {arm, \[Pi]-phi, arm2}]//FullSimplify + phil = np.maximum(np.pi - arm_max, phi_min) + phih = np.minimum(np.pi - arm_min, phi_max) + unphys_upperright_integral = phil - phih + np.sin(arm_max + phil) - np.sin(arm_max + phih) + unphys_upperright_integral *= (phih + arm_max > np.pi) + integral -= (az_max - az_min) * unphys_upperright_integral + + # Handle fully physical or fully unphysical + fully_phys = (phi_min + arm_min >= 0) & (phi_max + arm_max <= np.pi) + fully_unphys = (phi_max + arm_max <= 0) | (phi_min + arm_min >= np.pi) + + # Mathematica: Integrate[Sin[phi+arm], {phi,phi1,phi2} , {arm,arm1,arm2}]//FullSimplify + integral_full = (az_max - az_min) * ( + -np.sin(arm_min + phi_min) + np.sin(arm_max + phi_min) + np.sin(arm_min + phi_max) - np.sin(arm_max + phi_max)) + + if integral.ndim == 0: + if fully_phys: + return integral + if fully_unphys: + return 0 + else: + integral[fully_phys] = integral_full[fully_phys] + integral[fully_unphys] = 0 + + return integral + diff --git a/cosipy/response/rsp_to_arf_rmf.py b/cosipy/response/rsp_to_arf_rmf.py new file mode 100644 index 00000000..afdef1a0 --- /dev/null +++ b/cosipy/response/rsp_to_arf_rmf.py @@ -0,0 +1,543 @@ +import 
logging +logger = logging.getLogger(__name__) + +from cosipy.spacecraftfile.spacecraft_file import SpacecraftHistory + +import numpy as np +import astropy.units as u +from astropy.io import fits +from astropy.coordinates import SkyCoord + +import matplotlib.pyplot as plt +from matplotlib.colors import LogNorm + +from cosipy.response import FullDetectorResponse + +class RspArfRmfConverter: + + def __init__(self, response:FullDetectorResponse, ori:SpacecraftHistory, target_coord:SkyCoord): + + self.response = response + self.ori = ori + + self.dwell_map = self.ori.get_dwell_map(target_coord, nside = response.nside, scheme = response.scheme) + + def get_psr_rsp(self): + + """ + Generates the point source response based on the response file and dwell obstime map. + livetime is used to find the exposure obstime for this observation. + + Parameters + ---------- + :response : str or pathlib.Path, optional + The response for the observation (the defaul is `None`, which implies that the `response` will be read from the instance). + dwell_map : str, optional + The obstime dwell map for the source, you can load saved dwell obstime map using this parameter if you've saved it before (the defaul is `None`, which implies that the `dwell_map` will be read from the instance). + dts : numpy.ndarray or str, optional + The elapsed obstime for each pointing. It must has the same size as the pointings. If you have saved this array, you can load it using this parameter (the defaul is `None`, which implies that the `livetime` will be read from the instance). + + Returns + ------- + Ei_edges : numpy.ndarray + The edges of the incident energy. + Ei_lo : numpy.ndarray + The lower edges of the incident energy. + Ei_hi : numpy.ndarray + The upper edges of the incident energy. + Em_edges : numpy.ndarray + The edges of the measured energy. + Em_lo : numpy.ndarray + The lower edges of the measured energy. + Em_hi : numpy.ndarray + The upper edges of the measured energy. 
+ areas : numpy.ndarray + The effective area of each energy bin. + matrix : numpy.ndarray + The energy dispersion matrix. + pa_convention : str, optional + Polarization convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') + """ + + with self.response as response: + + # get point source response + self.psr = response.get_point_source_response(self.dwell_map) + + self.Ei_edges = np.array(response.axes['Ei'].edges) + self.Ei_lo = np.float32(self.Ei_edges[:-1]) # use float32 to match the requirement of the data type + self.Ei_hi = np.float32(self.Ei_edges[1:]) + + self.Em_edges = np.array(response.axes['Em'].edges) + self.Em_lo = np.float32(self.Em_edges[:-1]) + self.Em_hi = np.float32(self.Em_edges[1:]) + + # get the effective area and matrix + logger.info("Getting the effective area ...") + self.areas = np.float32(np.array(self.psr.project('Ei').to_dense().contents)) / self.ori.livetime.to_value( + u.second).sum() + spectral_response = np.float32(np.array(self.psr.project(['Ei', 'Em']).to_dense().contents)) + self.matrix = np.float32(np.zeros((self.Ei_lo.size, self.Em_lo.size))) # initate the matrix + + logger.info("Getting the energy redistribution matrix ...") + for i in np.arange(self.Ei_lo.size): + new_raw = spectral_response[i, :] / spectral_response[i, :].sum() + self.matrix[i, :] = new_raw + self.matrix = self.matrix.T + + return self.Ei_edges, self.Ei_lo, self.Ei_hi, self.Em_edges, self.Em_lo, self.Em_hi, self.areas, self.matrix + + def get_arf(self, out_name=None): + + """ + Converts the point source response to an arf file that can be read by XSPEC. + + Parameters + ---------- + out_name: str, optional + The name of the arf file to save. (the default is `None`, which implies that the saving name will be the target name of the instance). 
+ """ + + if out_name is None: + self.out_name = self.target_name + else: + self.out_name = out_name + + # blow write the arf file + copyright_string = " FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " + + ## Create PrimaryHDU + primaryhdu = fits.PrimaryHDU() # create an empty primary HDU + primaryhdu.header[ + "BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value + primaryhdu.header["COMMENT"] = copyright_string # add comments + primaryhdu.header # print headers and their values + + col1_energ_lo = fits.Column(name="ENERG_LO", format="E", unit="keV", array=self.Em_lo) + col2_energ_hi = fits.Column(name="ENERG_HI", format="E", unit="keV", array=self.Em_hi) + col3_specresp = fits.Column(name="SPECRESP", format="E", unit="cm**2", array=self.areas) + cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, + col3_specresp]) # create a ColDefs (column-definitions) object for all columns + specresp_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object + + specresp_bintablehdu.header.comments["TTYPE1"] = "label for field 1" + specresp_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" + specresp_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" + specresp_bintablehdu.header.comments["TTYPE2"] = "label for field 2" + specresp_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" + specresp_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" + specresp_bintablehdu.header.comments["TTYPE3"] = "label for field 3" + specresp_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" + specresp_bintablehdu.header.comments["TUNIT3"] = "physical unit of field" + + specresp_bintablehdu.header["EXTNAME"] = ("SPECRESP", "name of this binary table extension") + specresp_bintablehdu.header["TELESCOP"] = ("COSI", 
"mission/satellite name")
+        specresp_bintablehdu.header["INSTRUME"] = ("COSI", "instrument/detector name")
+        specresp_bintablehdu.header["FILTER"] = ("NONE", "filter in use")
+        specresp_bintablehdu.header["HDUCLAS1"] = ("RESPONSE", "dataset relates to spectral response")
+        specresp_bintablehdu.header["HDUCLAS2"] = ("SPECRESP", "extension contains an ARF")
+        specresp_bintablehdu.header["HDUVERS"] = ("1.1.0", "version of format")
+
+        new_arfhdus = fits.HDUList([primaryhdu, specresp_bintablehdu])
+        new_arfhdus.writeto(f'{self.out_name}.arf', overwrite=True)
+
+        return
+
+    def get_rmf(self, out_name=None):
+
+        """
+        Converts the point source response to an rmf file that can be read by XSPEC.
+
+        Parameters
+        ----------
+        out_name: str, optional
+            The name of the rmf file to save. (the default is None, which implies that the saving name will be the target name of the instance).
+        """
+
+        if out_name is None:
+            # NOTE(review): self.target_name is not set in __init__ — confirm it
+            # is assigned elsewhere before calling with out_name=None.
+            self.out_name = self.target_name
+        else:
+            self.out_name = out_name
+
+        # below we write the rmf file
+        copyright_string = " FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H "
+
+        ## Create PrimaryHDU
+        primaryhdu = fits.PrimaryHDU()  # create an empty primary HDU
+        primaryhdu.header[
+            "BITPIX"] = -32  # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value
+        primaryhdu.header["COMMENT"] = copyright_string  # add comments
+        primaryhdu.header  # print headers and their values
+
+        ## Create binary table HDU for MATRIX
+        ### prepare columns
+        energ_lo = []
+        energ_hi = []
+        n_grp = []
+        f_chan = []
+        n_chan = []
+        matrix = []
+        for i in np.arange(len(self.Ei_lo)):
+            # Fix: ENERG_LO/ENERG_HI of the MATRIX extension are the *incident*
+            # energy bin edges (OGIP RSP_MATRIX); the lower edge was wrongly
+            # taken from the measured-energy edges (Em_lo) while the upper edge
+            # used Ei_hi, producing inconsistent energy bins.
+            energ_lo_temp = np.float32(self.Ei_lo[i])
+            energ_hi_temp = np.float32(self.Ei_hi[i])
+
+            if self.matrix[:, i].sum() != 0:
+                nz_matrix_idx = np.nonzero(self.matrix[:, i])[0]  # non-zero index for the matrix
+                subsets = np.split(nz_matrix_idx, np.where(np.diff(nz_matrix_idx) != 1)[0] + 1)
+                
n_grp_temp = np.int16(len(subsets)) + f_chan_temp = [] + n_chan_temp = [] + matrix_temp = [] + for m in np.arange(n_grp_temp): + f_chan_temp += [subsets[m][0]] + n_chan_temp += [len(subsets[m])] + for m in nz_matrix_idx: + matrix_temp += [self.matrix[:, i][m]] + f_chan_temp = np.int16(np.array(f_chan_temp)) + n_chan_temp = np.int16(np.array(n_chan_temp)) + matrix_temp = np.float32(np.array(matrix_temp)) + else: + n_grp_temp = np.int16(0) + f_chan_temp = np.int16(np.array([0])) + n_chan_temp = np.int16(np.array([0])) + matrix_temp = np.float32(np.array([0])) + + energ_lo.append(energ_lo_temp) + energ_hi.append(energ_hi_temp) + n_grp.append(n_grp_temp) + f_chan.append(f_chan_temp) + n_chan.append(n_chan_temp) + matrix.append(matrix_temp) + + col1_energ_lo = fits.Column(name="ENERG_LO", format="E", unit="keV", array=energ_lo) + col2_energ_hi = fits.Column(name="ENERG_HI", format="E", unit="keV", array=energ_hi) + col3_n_grp = fits.Column(name="N_GRP", format="I", array=n_grp) + col4_f_chan = fits.Column(name="F_CHAN", format="PI(54)", array=f_chan) + col5_n_chan = fits.Column(name="N_CHAN", format="PI(54)", array=n_chan) + col6_n_chan = fits.Column(name="MATRIX", format="PE(161)", array=matrix) + cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, col3_n_grp, col4_f_chan, col5_n_chan, + col6_n_chan]) # create a ColDefs (column-definitions) object for all columns + matrix_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object + + matrix_bintablehdu.header.comments["TTYPE1"] = "label for field 1 " + matrix_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" + matrix_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" + matrix_bintablehdu.header.comments["TTYPE2"] = "label for field 2" + matrix_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" + matrix_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" + matrix_bintablehdu.header.comments["TTYPE3"] = "label for field 
3 " + matrix_bintablehdu.header.comments["TFORM3"] = "data format of field: 2-byte INTEGER" + matrix_bintablehdu.header.comments["TTYPE4"] = "label for field 4" + matrix_bintablehdu.header.comments["TFORM4"] = "data format of field: variable length array" + matrix_bintablehdu.header.comments["TTYPE5"] = "label for field 5" + matrix_bintablehdu.header.comments["TFORM5"] = "data format of field: variable length array" + matrix_bintablehdu.header.comments["TTYPE6"] = "label for field 6" + matrix_bintablehdu.header.comments["TFORM6"] = "data format of field: variable length array" + + matrix_bintablehdu.header["EXTNAME"] = ("MATRIX", "name of this binary table extension") + matrix_bintablehdu.header["TELESCOP"] = ("COSI", "mission/satellite name") + matrix_bintablehdu.header["INSTRUME"] = ("COSI", "instrument/detector name") + matrix_bintablehdu.header["FILTER"] = ("NONE", "filter in use") + matrix_bintablehdu.header["CHANTYPE"] = ("PI", "total number of detector channels") + matrix_bintablehdu.header["DETCHANS"] = (len(self.Em_lo), "total number of detector channels") + matrix_bintablehdu.header["HDUCLASS"] = ("OGIP", "format conforms to OGIP standard") + matrix_bintablehdu.header["HDUCLAS1"] = ("RESPONSE", "dataset relates to spectral response") + matrix_bintablehdu.header["HDUCLAS2"] = ("RSP_MATRIX", "dataset is a spectral response matrix") + matrix_bintablehdu.header["HDUVERS"] = ("1.3.0", "version of format") + matrix_bintablehdu.header["TLMIN4"] = (0, "minimum value legally allowed in column 4") + + ## Create binary table HDU for EBOUNDS + channels = np.int16(np.arange(len(self.Em_lo))) + e_min = np.float32(self.Em_lo) + e_max = np.float32(self.Em_hi) + + col1_channels = fits.Column(name="CHANNEL", format="I", array=channels) + col2_e_min = fits.Column(name="E_MIN", format="E", unit="keV", array=e_min) + col3_e_max = fits.Column(name="E_MAX", format="E", unit="keV", array=e_max) + cols = fits.ColDefs([col1_channels, col2_e_min, col3_e_max]) + ebounds_bintablehdu 
= fits.BinTableHDU.from_columns(cols) + + ebounds_bintablehdu.header.comments["TTYPE1"] = "label for field 1" + ebounds_bintablehdu.header.comments["TFORM1"] = "data format of field: 2-byte INTEGER" + ebounds_bintablehdu.header.comments["TTYPE2"] = "label for field 2" + ebounds_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" + ebounds_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" + ebounds_bintablehdu.header.comments["TTYPE3"] = "label for field 3" + ebounds_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" + ebounds_bintablehdu.header.comments["TUNIT3"] = "physical unit of field" + + ebounds_bintablehdu.header["EXTNAME"] = ("EBOUNDS", "name of this binary table extension") + ebounds_bintablehdu.header["TELESCOP"] = ("COSI", "mission/satellite") + ebounds_bintablehdu.header["INSTRUME"] = ("COSI", "nstrument/detector name") + ebounds_bintablehdu.header["FILTER"] = ("NONE", "filter in use") + ebounds_bintablehdu.header["CHANTYPE"] = ("PI", "channel type (PHA or PI)") + ebounds_bintablehdu.header["DETCHANS"] = (len(self.Em_lo), "total number of detector channels") + ebounds_bintablehdu.header["HDUCLASS"] = ("OGIP", "format conforms to OGIP standard") + ebounds_bintablehdu.header["HDUCLAS1"] = ("RESPONSE", "dataset relates to spectral response") + ebounds_bintablehdu.header["HDUCLAS2"] = ("EBOUNDS", "dataset is a spectral response matrix") + ebounds_bintablehdu.header["HDUVERS"] = ("1.2.0", "version of format") + + new_rmfhdus = fits.HDUList([primaryhdu, matrix_bintablehdu, ebounds_bintablehdu]) + new_rmfhdus.writeto(f'{self.out_name}.rmf', overwrite=True) + + return + + def get_pha(self, src_counts, errors, rmf_file=None, arf_file=None, bkg_file=None, exposure_time=None, dts=None, + telescope="COSI", instrument="COSI"): + + """ + Generate the pha file that can be read by XSPEC. This file stores the counts info of the source. 
+ + Parameters + ---------- + src_counts : numpy.ndarray + The counts in each energy band. If you have src_counts with unit counts/kev/s, you must convert it to counts by multiplying it with exposure obstime and the energy band width. + errors : numpy.ndarray + The error for counts. It has the same unit requirement as src_counts. + rmf_file : str, optional + The rmf file name to be written into the pha file (the default is `None`, which implies that it uses the rmf file generate by function `get_rmf`) + arf_file : str, optional + The arf file name to be written into the pha file (the default is `None`, which implies that it uses the arf file generate by function `get_arf`) + bkg_file : str, optional + The background file name (the default is `None`, which implied the `src_counts` is source counts only). + exposure_time : float, optional + The exposure obstime for this source observation (the default is `None`, which implied that the exposure obstime will be calculated by `livetime`). + dts : numpy.ndarray, optional + It's used to calculate the exposure obstime. It has the same effect as `exposure_time`. If both `exposure_time` and `livetime` are given, `livetime` will write over the exposure_time (the default is `None`, which implies that the `livetime` will be read from the instance). + telescope : str, optional + The name of the telecope (the default is "COSI"). + instrument : str, optional + The instrument name (the default is "COSI"). 
+ """ + + self.src_counts = src_counts + self.errors = errors + + if bkg_file is None: + self.bkg_file = bkg_file + else: + self.bkg_file = "None" + + self.bkg_file = bkg_file + + if rmf_file is None: + self.rmf_file = rmf_file + else: + self.rmf_file = f'{self.out_name}.rmf' + + if arf_file is None: + self.arf_file = arf_file + else: + self.arf_file = f'{self.out_name}.arf' + + if exposure_time is not None: + self.exposure_time = exposure_time + elif dts is not None: + livetime = self.__str_or_array(dts) + self.exposure_time = livetime.sum() + self.telescope = telescope + self.instrument = instrument + self.channel_number = len(self.src_counts) + + # define other hardcoded inputs + copyright_string = " FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " + channels = np.arange(self.channel_number) + + # Create PrimaryHDU + primaryhdu = fits.PrimaryHDU() # create an empty primary HDU + primaryhdu.header[ + "BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value + primaryhdu.header["COMMENT"] = copyright_string # add comments + primaryhdu.header["TELESCOP"] = telescope # add telescope keyword valie + primaryhdu.header["INSTRUME"] = instrument # add instrument keyword valie + primaryhdu.header # print headers and their values + + # Create binary table HDU + a1 = np.array(channels, dtype="int32") # I guess I need to convert the dtype to match the format J + a2 = np.array(self.src_counts, dtype="int64") # int32 is not enough for counts + a3 = np.array(self.errors, dtype="int64") # int32 is not enough for errors + col1 = fits.Column(name="CHANNEL", format="J", array=a1) + col2 = fits.Column(name="COUNTS", format="K", array=a2, unit="count") + col3 = fits.Column(name="STAT_ERR", format="K", array=a3, unit="count") + cols = fits.ColDefs([col1, col2, col3]) # create a ColDefs (column-definitions) object for all columns + bintablehdu = 
fits.BinTableHDU.from_columns(cols) # create a binary table HDU object + + # add other BinTableHDU hear keywords,their values, and comments + bintablehdu.header.comments["TTYPE1"] = "label for field 1" + bintablehdu.header.comments["TFORM1"] = "data format of field: 32-bit integer" + bintablehdu.header.comments["TTYPE2"] = "label for field 2" + bintablehdu.header.comments["TFORM2"] = "data format of field: 32-bit integer" + bintablehdu.header.comments["TUNIT2"] = "physical unit of field 2" + + bintablehdu.header["EXTNAME"] = ("SPECTRUM", "name of this binary table extension") + bintablehdu.header["TELESCOP"] = (self.telescope, "telescope/mission name") + bintablehdu.header["INSTRUME"] = (self.instrument, "instrument/detector name") + bintablehdu.header["FILTER"] = ("NONE", "filter type if any") + bintablehdu.header["EXPOSURE"] = (self.exposure_time, "integration obstime in seconds") + bintablehdu.header["BACKFILE"] = (self.bkg_file, "background filename") + bintablehdu.header["BACKSCAL"] = (1, "background scaling factor") + bintablehdu.header["CORRFILE"] = ("NONE", "associated correction filename") + bintablehdu.header["CORRSCAL"] = (1, "correction file scaling factor") + bintablehdu.header["CORRSCAL"] = (1, "correction file scaling factor") + bintablehdu.header["RESPFILE"] = (self.rmf_file, "associated rmf filename") + bintablehdu.header["ANCRFILE"] = (self.arf_file, "associated arf filename") + bintablehdu.header["AREASCAL"] = (1, "area scaling factor") + bintablehdu.header["STAT_ERR"] = (0, "statistical error specified if any") + bintablehdu.header["SYS_ERR"] = (0, "systematic error specified if any") + bintablehdu.header["GROUPING"] = (0, "grouping of the data has been defined if any") + bintablehdu.header["QUALITY"] = (0, "data quality information specified") + bintablehdu.header["HDUCLASS"] = ("OGIP", "format conforms to OGIP standard") + bintablehdu.header["HDUCLAS1"] = ("SPECTRUM", "PHA dataset") + bintablehdu.header["HDUVERS"] = ("1.2.1", "version of 
format") + bintablehdu.header["POISSERR"] = (False, "Poissonian errors to be assumed, T as True") + bintablehdu.header["CHANTYPE"] = ("PI", "channel type (PHA or PI)") + bintablehdu.header["DETCHANS"] = (self.channel_number, "total number of detector channels") + + new_phahdus = fits.HDUList([primaryhdu, bintablehdu]) + new_phahdus.writeto(f'{self.out_name}.pha', overwrite=True) + + return + + def plot_arf(self, file_name=None, save_name=None, dpi=300): + + """ + Read the arf fits file, plot and save it. + + Parameters + ---------- + file_name: str, optional + The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). + save_name: str, optional + The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). + dpi: int, optional + The dpi of the saved image (the default is 300). + """ + + if file_name != None: + self.file_name = file_name + else: + self.file_name = f'{self.out_name}.arf' + + if save_name != None: + self.save_name = save_name + else: + self.save_name = self.out_name + + self.dpi = dpi + + self.arf = fits.open(self.file_name) # read file + + # SPECRESP HDU + self.specresp_hdu = self.arf["SPECRESP"] + + self.areas = np.array(self.specresp_hdu.data["SPECRESP"]) + self.Em_lo = np.array(self.specresp_hdu.data["ENERG_LO"]) + self.Em_hi = np.array(self.specresp_hdu.data["ENERG_HI"]) + + E_center = (self.Em_lo + self.Em_hi) / 2 + E_edges = np.append(self.Em_lo, self.Em_hi[-1]) + + fig, ax = plt.subplots() + ax.hist(E_center, E_edges, weights=self.areas, histtype='step') + + ax.set_title("Effective area") + ax.set_xlabel("Energy[$keV$]") + ax.set_ylabel(r"Effective area [$cm^2$]") + ax.set_xscale("log") + fig.savefig(f"Effective_area_for_{self.save_name}.png", bbox_inches="tight", pad_inches=0.1, dpi=self.dpi) + # fig.show() + + return + + def plot_rmf(self, file_name=None, save_name=None, dpi=300): + + """ + Read the rmf fits 
file, plot and save it. + + Parameters + ---------- + file_name: str, optional + The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). + save_name: str, optional + The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). + dpi: int, optional + The dpi of the saved image (the default is 300). + """ + + if file_name != None: + self.file_name = file_name + else: + self.file_name = f'{self.out_name}.rmf' + + if save_name != None: + self.save_name = save_name + else: + self.save_name = self.out_name + + self.dpi = dpi + + # Read rmf file + self.rmf = fits.open(self.file_name) # read file + + # Read the ENOUNDS information + ebounds_ext = self.rmf["EBOUNDS"] + channel_low = ebounds_ext.data[ + "E_MIN"] # energy bin lower edges for channels (channels are just incident energy bins) + channel_high = ebounds_ext.data[ + "E_MAX"] # energy bin higher edges for channels (channels are just incident energy bins) + + # Read the MATRIX extension + matrix_ext = self.rmf['MATRIX'] + # logger.info(repr(matrix_hdu.header[:60])) + energy_low = matrix_ext.data["ENERG_LO"] # energy bin lower edges for measured energies + energy_high = matrix_ext.data["ENERG_HI"] # energy bin higher edges for measured energies + data = matrix_ext.data + + # Create a 2-d numpy array and store probability data into the redistribution matrix + rmf_matrix = np.zeros((len(energy_low), len(channel_low))) # create an empty matrix + for i in np.arange(data.shape[0]): # i is the measured energy index, examine the matrix_ext.data rows by rows + if data[i][5].sum() == 0: # if the sum of probabilities is zero, then skip since there is no data at all + pass + else: + # measured_energy_index = np.argwhere(energy_low == data[157][0])[0][0] + f_chan = data[i][3] # get the starting channel of each subsets + n_chann = data[i][4] # get the number of channels in each subsets + matrix = 
data[i][5] # get the probabilities of this row (incident energy) + indices = [] + for k in f_chan: + channels = 0 + channels = np.arange(k, k + n_chann[np.argwhere(f_chan == k)]).tolist() # generate the cha + indices += channels # fappend the channels togeter + indices = np.array(indices) + for m in indices: + rmf_matrix[i][m] = matrix[ + np.argwhere(indices == m)[0][0]] # write the probabilities into the empty matrix + + # plot the redistribution matrix + xcenter = np.divide(energy_low + energy_high, 2) + x_center_coords = np.repeat(xcenter, 10) + y_center_coords = np.tile(xcenter, 10) + energy_all_edges = np.append(energy_low, energy_high[-1]) + # bin_edges = np.array([incident_energy_bins,incident_energy_bins]) # doesn't work + bin_edges = np.vstack((energy_all_edges, energy_all_edges)) + # logger.info(bin_edges) + + self.probability = [] + for i in np.arange(10): + for j in np.arange(10): + self.probability.append(rmf_matrix[i][j]) + # logger.info(type(probability)) + + plt.hist2d(x=x_center_coords, y=y_center_coords, weights=self.probability, bins=bin_edges, norm=LogNorm()) + plt.xscale('log') + plt.yscale('log') + plt.xlabel("Incident energy [$keV$]") + plt.ylabel("Measured energy [$keV$]") + plt.title("Redistribution matrix") + # plt.xlim([70,10000]) + # plt.ylim([70,10000]) + plt.colorbar(norm=LogNorm()) + plt.savefig(f"Redistribution_matrix_for_{self.save_name}.png", bbox_inches="tight", pad_inches=0.1, dpi=300) + # plt.show() + + return \ No newline at end of file diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py new file mode 100644 index 00000000..d27ad1a8 --- /dev/null +++ b/cosipy/response/threeml_point_source_response.py @@ -0,0 +1,214 @@ +import logging +from pathlib import Path +from typing import Union + +from mhealpy import HealpixBase + +from cosipy.data_io import EmCDSBinnedData +from cosipy.interfaces.instrument_response_interface import BinnedInstrumentResponseInterface +from 
cosipy.polarization.polarization_axis import PolarizationAxis +from cosipy.threeml.util import to_linear_polarization + +logger = logging.getLogger(__name__) + +import copy + +from astromodels.sources import Source, PointSource +from scoords import SpacecraftFrame +from histpy import Axes, Histogram, Axis, HealpixAxis +from cosipy.interfaces import BinnedThreeMLSourceResponseInterface, BinnedDataInterface, DataInterface + +from cosipy.response import FullDetectorResponse, PointSourceResponse +from cosipy.spacecraftfile import SpacecraftHistory, SpacecraftAttitudeMap + +from mhealpy import HealpixMap + +__all__ = ["BinnedThreeMLPointSourceResponse"] + +class BinnedThreeMLPointSourceResponse(BinnedThreeMLSourceResponseInterface): + """ + COSI 3ML plugin. + + Parameters + ---------- + dr: + Full detector response handle, or the file path + sc_history: + Contains the information of the orientation: timestamps (astropy.Time) and attitudes (scoord.Attitude) that describe + the spacecraft for the duration of the data included in the analysis + """ + + def __init__(self, + data:EmCDSBinnedData, + instrument_response: BinnedInstrumentResponseInterface, + sc_history: SpacecraftHistory, + energy_axis:Axis, + polarization_axis:PolarizationAxis = None, + nside = None + ): + """ + + Parameters + ---------- + instrument_response: + A BinnedInstrumentResponseInterface capable of providing the differential + effective area in local coordinates as a function of direction, energy and + polarization. + sc_history: + The SpacecraftHistory describing the SC orbit and attitude vs time. + energy_axis: + The desired effective binning of the photon energy (aka Ei) + polarization_axis: + The desired effective binning of the photon polarization angle (aka Pol). + This also defined the polarization coordinate system and convention. + nside: + - If transformation from local to an inertial system is needed, the spacecraft + attitude will be first discretized based on this nside. 
+ - If local, this is the nside of the dwell time map + """ + + # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface + + + # Interface inputs + self._source = None + + # Other implementation inputs + self._data = data + + self._sc_ori = sc_history + self._response = instrument_response + self._energy_axis = energy_axis + self._polarization_axis = polarization_axis + self._nside = nside + + # Cache + # Prevent unnecessary calculations and new memory allocations + + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._last_convolved_source_dict = None + + self._expectation = None + + # The PSR change for each direction, but it's the same for all spectrum parameters + + # Source location cached separately since changing the response + # for a given direction is expensive + self._last_convolved_source_skycoord = None + + self._psr = None + + def clear_cache(self): + + self._last_convolved_source_dict = None + self._expectation = None + self._last_convolved_source_skycoord = None + self._psr = None + + def copy(self) -> "BinnedThreeMlPointSourceResponse": + """ + Safe copy to use for multiple sources + Returns + ------- + A copy than can be used safely to convolve another source + """ + new = copy.copy(self) + new.clear_cache() + return new + + def set_source(self, source: Source): + """ + The source is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. 
+ """ + if not isinstance(source, PointSource): + raise TypeError("I only know how to handle point sources!") + + if (to_linear_polarization(source.spectrum.main.polarization) is not None and + self._polarization_axis is None): + raise RuntimeError("This response can't handle a polarized source.") + + self._source = source + + def expectation(self, copy = True)-> Histogram: + # TODO: check coordsys from axis + # TODO: Earth occ always true in this case + + if self._data is None: + raise RuntimeError("Call set_source() first.") + + if self._source is None: + raise RuntimeError("Call set_source() first.") + + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + source_dict = self._source.to_dict() + + coord = self._source.position.sky_coord + + # Use cached expectation if nothing has changed + if self._expectation is not None and self._last_convolved_source_dict == source_dict: + if copy: + return self._expectation.copy() + else: + return self._expectation + + # Expectation calculation + + # Check if the source position change, since these operations + # are expensive + if self._psr is None or coord != self._last_convolved_source_skycoord: + + coordsys = self._data.axes["PsiChi"].coordsys + + logger.info("... Calculating point source response ...") + + if isinstance(coordsys, SpacecraftFrame): + # Local coordinates + + dwell_time_map = self._sc_ori.get_dwell_map(coord, nside = self._nside) + + self._psr = PointSourceResponse.from_dwell_time_map(self._data, + self._response, + dwell_time_map, + self._energy_axis, + self._polarization_axis) + + else: + # Inertial e..g. 
galactic + + scatt_map = self._sc_ori.get_scatt_map(nside=self._nside, + target_coord=coord, + earth_occ=True) + + self._psr = PointSourceResponse.from_scatt_map(coord, + self._data, + self._response, + scatt_map, + self._energy_axis, + self._polarization_axis) + + logger.info(f"--> done (source name : {self._source.name})") + + # Convolve with spectrum + self._expectation = self._psr.get_expectation(self._source.spectrum.main.shape, + self._source.spectrum.main.polarization) + + # Check if axes match + if self._data.axes != self._expectation.axes: + raise ValueError( + "Currently, the expectation axes must exactly match the detector response measurement axes") + + # Cache. Use dict and copy since the internal variables can change + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._last_convolved_source_dict = source_dict + self._last_convolved_source_skycoord = coord.copy() + + # Copy to prevent user to modify our cache + if copy: + return self._expectation.copy() + else: + return self._expectation diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py new file mode 100644 index 00000000..b00bb8a1 --- /dev/null +++ b/cosipy/response/threeml_response.py @@ -0,0 +1,174 @@ +import copy +from typing import Dict + +from numba.typed.dictobject import DictModel + +from cosipy.interfaces import BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, \ + BinnedDataInterface, DataInterface, ThreeMLSourceResponseInterface + +from astromodels import Model +from astromodels.sources import PointSource, ExtendedSource + +from histpy import Axes, Histogram + +__all__ = ["BinnedThreeMLModelFolding"] + +class ThreeMLModelFoldingCacheSourceResponsesMixin: + """ + Avoid duplicating code that is the same for the binned and unbinned case + + Needs: + self._model, + """ + _model: Model + _source_responses: Dict[str, ThreeMLSourceResponseInterface] + _psr: 
ThreeMLSourceResponseInterface + _esr: ThreeMLSourceResponseInterface + _cached_model_dict: dict + + def _cache_source_responses(self): + """ + Create a copy of the PSR and ESR for each source + + Returns True if there was any update + + Updates _cached_model_dict and _source_responses + """ + + if self._model is None: + raise RuntimeError("Call set_model() first.") + + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + current_model_dict = self._model.to_dict() + + # TODO: currently Model.__eq__ seems broken. It returns. True even + # if the internal parameters changed. Caching the expected value + # is not implemented. Remove the "False and" when fixed + if self._cached_model_dict is not None and self._cached_model_dict == current_model_dict: + # Nothing to do + return False + + # This accounts for the possibility of some sources being added or + # removed from the model. + new_source_responses = {} + + for name, source in self._model.sources.items(): + + if name in self._source_responses: + # Use cache + new_source_responses[name] = self._source_responses[name] + continue + + if isinstance(source, PointSource): + + if self._psr is None: + raise RuntimeError("The model includes a point source but no point source response was provided") + + psr_copy = self._psr.copy() + psr_copy.set_source(source) + new_source_responses[name] = psr_copy + elif isinstance(source, ExtendedSource): + + if self._esr is None: + raise RuntimeError("The model includes an extended source but no extended source response was provided") + + esr_copy = self._esr.copy() + esr_copy.set_source(source) + new_source_responses[name] = esr_copy + else: + raise RuntimeError(f"The model contains the source {name} " + f"of type {type(source)}. 
I don't know "
                                   "how to handle it!")

        self._source_responses = new_source_responses

        # See this issue for the caveats of comparing models
        # https://github.com/threeML/threeML/issues/645
        self._cached_model_dict = current_model_dict

        return True

class BinnedThreeMLModelFolding(BinnedThreeMLModelFoldingInterface, ThreeMLModelFoldingCacheSourceResponsesMixin):

    def __init__(self,
                 data: BinnedDataInterface,
                 point_source_response: BinnedThreeMLSourceResponseInterface = None,
                 extended_source_response: BinnedThreeMLSourceResponseInterface = None):
        """
        Fold an astromodels model through binned source responses.

        Parameters
        ----------
        point_source_response:
            Response for :class:`astromodels.sources.PointSource`s.
            It can be None if you don't plan to use it for point sources.
        extended_source_response
            Response for :class:`astromodels.sources.ExtendedSource`s.
            It can be None if you don't plan to use it for extended sources.
        """

        # Interface inputs
        self._model = None

        # Implementation inputs
        self._psr = point_source_response
        self._esr = extended_source_response

        # Cache
        # Prevent unnecessary calculations and new memory allocations
        # See this issue for the caveats of comparing models
        # https://github.com/threeML/threeML/issues/645
        self._cached_model_dict = None
        self._source_responses = {}
        self._expectation = Histogram(data.axes)

    def set_model(self, model: Model):
        """
        Register the model whose sources will be folded by expectation().
+ + Parameters + ---------- + model + + Returns + ------- + + """ + + self._model = model + + def expectation(self, axes:Axes, copy:bool = True)->Histogram: + """ + + Parameters + ---------- + data + copy + + Returns + ------- + + """ + + if self._model is None: + raise RuntimeError("Call set_data() and set_model() first") + + # Create a copy of the PSR and ESR for each source + model_changed = self._cache_source_responses() + + if not model_changed: + if copy: + return self._expectation.copy() + else: + return self._expectation + else: + self._expectation.clear() + + # Convolve all sources with the response + for source_name,psr in self._source_responses.items(): + self._expectation += psr.expectation(axes) + + if copy: + return self._expectation.copy() + else: + return self._expectation \ No newline at end of file diff --git a/cosipy/source_injector/source_injector.py b/cosipy/source_injector/source_injector.py index 02c1c46f..8a16781f 100644 --- a/cosipy/source_injector/source_injector.py +++ b/cosipy/source_injector/source_injector.py @@ -84,11 +84,9 @@ def inject_point_source(self, spectrum, coordinate, The spectrum model defined from `astromodels`. coordinate : astropy.coordinates.SkyCoord The coordinate of the point source. - orientation : cosipy.spacecraftfile.SpacecraftFile, optional + orientation : cosipy.spacecraftfile.SpacecraftHistory, optional The orientation of the telescope during the mock - simulation. This is needed when using a detector - response. (the default is `None`, which means a galactic - response is used. + simulation. This is needed when using a detector response. (the default is `None`, which means a galactic response is used. source_name : str, optional The name of the source (the default is `point_source`). 
make_spectrum_plot : bool, optional diff --git a/cosipy/spacecraftfile/SpacecraftFile.py b/cosipy/spacecraftfile/SpacecraftFile.py deleted file mode 100644 index 494f141d..00000000 --- a/cosipy/spacecraftfile/SpacecraftFile.py +++ /dev/null @@ -1,1298 +0,0 @@ -import numpy as np - -import pathlib - -import matplotlib.pyplot as plt -from matplotlib.colors import LogNorm - -import astropy.units as u -from astropy.io import fits -from astropy.time import Time, TimeDelta -from astropy.coordinates import ( - SkyCoord, - UnitSphericalRepresentation, -) - -from mhealpy import HealpixMap - -from scoords import Attitude, SpacecraftFrame - -from histpy import Axis, HealpixAxis - -from cosipy.response import FullDetectorResponse -from .scatt_map import SpacecraftAttitudeMap - -import logging -logger = logging.getLogger(__name__) - -class SpacecraftFile(): - - def __init__(self, time, - x_pointings = None, - y_pointings = None, - z_pointings = None, - attitudes = None, - earth_zenith = None, - altitude = None, - livetime = None, - frame = "galactic"): - - """ - Handles the spacecraft orientation. Calculates the dwell time - map and point source response over a certain orientation - period. Exports the point source response as RMF and ARF - files that can be read by XSPEC. - - Input must contain either pointings on at least two axes or a - set of Attitudes for each time point; at most one of these is - permitted to avoid inconsistency. All input pointings and - Attitudes will be stored in the specified frame. - - If the input pointings for provided axes are not orthogonal - directions, they are stored as-is, but the Attitude class will - compute an orthogonal approximation to them that is used - internally for all operations. - - Parameters - ---------- - Time : astropy.time.Time - The time stamps for each pointings. Note this is NOT the - time duration. 
- x_pointings : astropy.coordinates.SkyCoord, optional - The pointings of the x axis of the local - coordinate system attached to the spacecraft (the default - is `None`, which implies no input for the x pointings). - y_pointings : astropy.coordinates.SkyCoord, optional - The pointings of the y axis of the local - coordinate system attached to the spacecraft (the default - is `None`, which implies no input for the y pointings). - z_pointings : astropy.coordinates.SkyCoord, optional - The pointings of the z axis of the local - coordinate system attached to the spacecraft (the default - is `None`, which implies no input for the z pointings). - attitudes : array, optional - Attitudes corresponding to the pointings at each time point. - earth_zenith : astropy.coordinates.SkyCoord, optional - The pointings of the Earth zenith (the default is `None`, - which implies no input for the earth pointings). - altitude : array, optional - Altitude of the spacecraft in km. - livetime : array, optional - Time in seconds the instrument is live for the corresponding - energy bin (using left endpoints so that the last entry in - the ori file is 0). 
- frame : str, optional - Coordinate frame for stored pointing directions and - Attitudes (default: "galactic") - - """ - - # check if the inputs are valid - - # Time - if isinstance(time, Time): - self._time = time - self._raw_time = self._time.to_value(format = "unix") - self._raw_time_delta = np.diff(self._raw_time) - else: - raise TypeError("The time should be a astropy.time.Time object") - - # x pointings - if not isinstance(x_pointings, (SkyCoord, type(None))): - raise TypeError("The x_pointings should be a SkyCoord object or None!") - - # y pointings - if not isinstance(y_pointings, (SkyCoord, type(None))): - raise TypeError("The y_pointings should be a SkyCoord object or None!") - - # z pointings - if not isinstance(z_pointings, (SkyCoord, type(None))): - raise TypeError("The z_pointings should be a SkyCoord object or None!") - - # attitudes - if not isinstance(attitudes, (Attitude, type(None))): - raise TypeError("attitudes should be an Attitude object or None!") - - n_axes = sum(x is not None for x in (x_pointings, y_pointings, z_pointings)) - - if attitudes is None: - - if n_axes < 2: - raise ValueError("SpacecraftFile requires pointings for at least two axes") - - self.x_pointings = None if x_pointings is None else x_pointings.transform_to(frame) - self.y_pointings = None if y_pointings is None else y_pointings.transform_to(frame) - self.z_pointings = None if z_pointings is None else z_pointings.transform_to(frame) - - self._attitude = Attitude.from_axes(x = x_pointings, - y = y_pointings, - z = z_pointings, - frame = frame) - else: - - if n_axes > 0: - raise ValueError("Cannot specify both attitudes and per-axis pointings") - - self._attitude = attitudes.transform_to(frame) - - pointings = self._attitude.as_axes() - self.x_pointings = pointings[0] - self.y_pointings = pointings[1] - self.z_pointings = pointings[2] - - # earth pointings - if isinstance(earth_zenith, SkyCoord): - self.earth_zenith = earth_zenith.transform_to(frame) - elif earth_zenith is 
not None: - raise TypeError("The earth_zeniths should be a SkyCoord object or None!") - - # altitude - if altitude is not None: - self._altitude = np.array(altitude) - - # livetime - if livetime is not None: - self.livetime = np.array(livetime) - - self.frame = frame - - - @classmethod - def parse_from_file(cls, file, frame='galactic'): - - """ - Parses timestamps, axis positions from file and returns to __init__. - - Parameters - ---------- - file : str - The file path of the pointings. - frame : str, optional - Frame of returned SpacecraftFile object (default: "galactic", - which matches how the data is stored) - Returns - ------- - cosipy.spacecraftfile.SpacecraftFile - The SpacecraftFile object. - """ - - orientation_file = np.loadtxt(file, - usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), - delimiter=' ', skiprows=1, - comments=("#", "EN")) - - time_stamps = orientation_file[:, 0] - axis_1 = orientation_file[:, [2, 1]] - axis_2 = orientation_file[:, [4, 3]] - axis_3 = orientation_file[:, [7, 6]] - altitude = np.array(orientation_file[:, 5]) - - # left end points, so remove last bin. - livetime = np.array(orientation_file[:, 8])[:-1] - - time = Time(time_stamps, format = "unix") - - # pointings are assumd to be stored in the file in galactic - # coordinates. - xpointings = SkyCoord(l = axis_1[:,0], b = axis_1[:,1], - unit=u.deg, frame = "galactic") - zpointings = SkyCoord(l = axis_2[:,0], b = axis_2[:,1], - unit=u.deg, frame = "galactic") - earthpointings = SkyCoord(l = axis_3[:,0], b = axis_3[:,1], - unit=u.deg, frame = "galactic") - - return cls(time, - x_pointings = xpointings, - z_pointings = zpointings, - earth_zenith = earthpointings, - altitude = altitude, - livetime = livetime, - frame = frame) - - def get_time(self): - - """ - Return the array of pointing times as a astropy.Time object. - - Returns - ------- - astropy.time.Time - The time stamps of the orientation. 
- """ - - return self._time - - def get_time_delta(self): - - """ - Return an array of the differences between neighbouring time points. - - Returns - ------- - time_delta : astropy.time.TimeDelta - The differences between the neighbouring time stamps. - """ - - time_delta = TimeDelta(self._raw_time_delta, format="sec") - - return time_delta - - def get_altitude(self): - - """ - Return the array of Earth altitude. - - Returns - ------- - numpy array - the Earth altitude. - """ - - return self._altitude - - def get_attitude(self): - - return self._attitude - - @staticmethod - def _cart_to_polar(v): - """ - Convert Cartesian 3D unit direction vectors to polar coordinates. - - Parameters - ---------- - v : np.ndarray(float) [N x 3] - array of N 3D unit vectors - - Returns - ------- - lon, colat : np.ndarray(float) [N] - longitude and co-latitude corresponding to v in radians - - """ - - lon = np.arctan2(v[:,1], v[:,0]) - colat = np.arccos(v[:,2]) - return (lon, colat) - - def source_interval(self, start, stop): - - """ - Return a new SpacecraftFile object including only attitude - information from this object in the time range [start, stop]. - - start and stop must be within the range of the full object's - times; if they exceed this range, they are trimmed to it. If - start and stop fall between times present in the original - object, the attitudes and other position information at these - times are interpolated. - - Parameters - ---------- - start : astropy.time.Time - The start time of the orientation period. - stop : astropy.time.Time - The end time of the orientation period. 
- - Returns - ------- - cosipy.spacecraft.SpacecraftFile - - """ - - def interp_scalar(t, x1, x2): - """ - Interpolate two scalar quantities - - Parameters - ---------- - t : float in [0, 1] - interpolation fraction - x1 : float - 1st value - x2 : float - 2nd value - - Returns - ------- - float: interpolated value - - """ - - return (1 - t) * x1 + t * x2 - - def interp_direction(t, d1, d2): - """ - Compute a direction that linearly interpolates between - directions d1 and d2 using SLERP. - - The two directions are assumed to have the same frame, - which is also used for the interpolated result. - - Parameters - ---------- - t : float in [0, 1] - interpolation fraction - d1 : SkyCoord - 1st direction - d2 : ndarray - 2nd direction - - Returns - ------- - SkyCoord: interpolated direction - - """ - - if d1 == d2: - return d1 - - v1 = d1.cartesian.xyz.value - v2 = d2.cartesian.xyz.value - - # angle between v1, v2 - theta = np.arccos(np.dot(v1, v2)) - - # SLERP interpolated vector - den = np.sin(theta) - vi = (np.sin((1-t)*theta) * v1 + np.sin(t*theta) * v2) / den - - dvi = SkyCoord(*vi, representation_type='cartesian') - - # make output representation actually (unit) spherical - usr = UnitSphericalRepresentation(lon=dvi.spherical.lon, - lat=dvi.spherical.lat) - - di = SkyCoord(usr, - representation_type=UnitSphericalRepresentation, - frame=d1.frame) - - return di - - def interp_attitude(t, att1, att2): - """ - Compute an Attitude that linearly interpolates between - att1 and att2 using SLERP on their quaternion - representations. - - The two Attitudes are assumed to have the same frame, - which is also used for the interpolated result. 
- - Parameters - ---------- - t : float in [0, 1] - interpolation fraction - att1 : Attitude - att2 : Attitude - - Returns - ------- - Attitude : interpolated attitude - - """ - - if att1 == att2: - return att1 - - p1 = att1.as_quat() - p2 = att2.as_quat() - - # angle between quaternions p1, p2 (xyzw order) - theta = 2 * np.arccos(np.dot(p1, p2)) - - # SLERP interpolated quaternion - den = np.sin(theta) - pi = (np.sin((1-t)*theta) * p1 + np.sin(t*theta) * p2)/den - - return Attitude.from_quat(pi, frame = att1.frame) - - - # trim times to within range of input orientations - start = max(start, self._time[0]) - stop = min(stop, self._time[-1]) - - if start > stop: - raise ValueError("start time cannot be after stop time.") - - start_time = start.to_value(format='unix') - stop_time = stop.to_value(format='unix') - - # Find smallest range of indices that contain range [start_time, - # stop_ime]. Range will always have size >= 2 unless - # start_time == stop_time and start_time falls exactly - # on a time point. 
- start_idx = self._raw_time.searchsorted(start_time, side='right') - 1 - stop_idx = self._raw_time.searchsorted(stop_time, side='left') - - new_raw_time = self._raw_time[start_idx : stop_idx + 1] - new_attitude = self._attitude[start_idx : stop_idx + 1] - new_earth_zenith = self.earth_zenith[start_idx : stop_idx + 1] - new_altitude = self._altitude[start_idx : stop_idx + 1] - new_livetime = self.livetime[start_idx : stop_idx] - - if start_time > self._raw_time[start_idx] or stop_time < self._raw_time[stop_idx]: - - # need to modify first and/or last entries -- make a copy - new_raw_time = new_raw_time.copy() - new_attitude = new_attitude.copy() - new_earth_zenith = new_earth_zenith.copy() - new_altitude = new_altitude.copy() - - if start_time > self._raw_time[start_idx]: - - new_raw_time[0] = start_time - - start_frac = \ - (start_time - self._raw_time[start_idx]) / \ - (self._raw_time[start_idx + 1] - self._raw_time[start_idx]) - - new_attitude[0] = interp_attitude(start_frac, - self._attitude[start_idx], - self._attitude[start_idx + 1]) - - # inputs are SkyCoords; result should be too - new_earth_zenith[0] = interp_direction(start_frac, - self.earth_zenith[start_idx], - self.earth_zenith[start_idx + 1]) - - new_altitude[0] = interp_scalar(start_frac, - self._altitude[start_idx], - self._altitude[start_idx + 1]) - - if stop_time < self._raw_time[stop_idx]: - - new_raw_time[-1] = stop_time - - stop_frac = \ - (stop_time - self._raw_time[stop_idx - 1]) / \ - (self._raw_time[stop_idx] - self._raw_time[stop_idx - 1]) - - new_attitude[-1] = interp_attitude(stop_frac, - self._attitude[stop_idx - 1], - self._attitude[stop_idx]) - - # inputs are SkyCoords; result should be too - new_earth_zenith[-1] = interp_direction(stop_frac, - self.earth_zenith[stop_idx - 1], - self.earth_zenith[stop_idx]) - - new_altitude[-1] = interp_scalar(stop_frac, - self._altitude[stop_idx - 1], - self._altitude[stop_idx]) - - # SAA livetime - new_livetime = new_livetime.copy() - - 
new_livetime[0] = \ - 0 if self.livetime[start_idx] == 0 \ - else new_raw_time[1] - new_raw_time[0] - - new_livetime[-1] = \ - 0 if self.livetime[stop_idx - 1] == 0 \ - else new_raw_time[-1] - new_raw_time[-2] - - new_time = Time(new_raw_time, format = "unix") - - return self.__class__(new_time, - attitudes = new_attitude, - earth_zenith = new_earth_zenith, - altitude = new_altitude, - livetime = new_livetime) - - def get_target_in_sc_frame(self, target_coord): - - """ - Convert a target coordinate in an inertial frame to the path of - the source in the spacecraft frame. The target coordinate may - be provided either as a SkyCoord or as a Cartesian 3-vector, - which determines the type of the output. - - Parameters - ---------- - target_coord : astropy.coordinates.SkyCoord or Cartesian 3-vector - The coordinates of the target object. - Returns - ------- - astropy.coordinates.SkyCoord or pair of np.ndarrays - The target coordinates in the spacecraft frame. If input - was a SkyCoord, output is a vector SkyCoord; otherwise, it - is a pair (longitude, co-latitude) in radians. 
- - """ - - useSkyCoord = isinstance(target_coord, SkyCoord) - - if useSkyCoord: - target_coord = target_coord.cartesian.xyz.value - - src_path_cartesian = np.dot(self._attitude.rot.inv().as_matrix(), - target_coord) - - # convert to spherical lon, colat in radians - lon, colat = self._cart_to_polar(src_path_cartesian) - - if useSkyCoord: - # SpacecraftFrame takes lon, lat arguments - src_path_skycoord = SkyCoord(lon = lon, lat = np.pi/2 - colat, - unit = u.rad, - frame = SpacecraftFrame()) - - return src_path_skycoord - else: - # return raw longitude and co-latitude in radians - return lon, colat - - - @staticmethod - def _sparse_sum_duplicates(indices, weights=None, dtype=None): - """ - Given an array of indices, possibly with duplicates, and an - optional array of weights per index (defaults to all ones if - None), return a sorted array of the unique values in indices - and, for each, the sum of the weights for each unique index. - - If input weights are provided, only unique indices with - nonzero weights are returned. - - Parameters - ---------- - indices : array of int - weights : array of int or float type - dtype : data type (optional) - Type of returned weights. If None, type is int if weights - not given, or float64 if they are. 
- - Returns - ------- - - unique_indices : array of int - sorted unique indices in input - - idx_weights : array of type as described above - sum of weights for each unique index in input - - """ - - if weights is None: - unique_indices, idx_weights = np.unique(indices, - return_counts=True) - else: - sp_weights = np.bincount(indices, weights) - unique_indices = np.flatnonzero(sp_weights) - idx_weights = sp_weights[unique_indices] - - if dtype is not None: - idx_weights = idx_weights.astype(dtype, copy=False) - - return unique_indices, idx_weights - - def get_exposure(self, base, theta, phi=None, - lonlat=False, interp=True): - """ - Compute the set of exposed HEALPix pixels relative to a - HealpixBase arising from a sequence of spacecraft-frame - directions with durations as specified in this SpacecraftFile. - - If theta is a SkyCoord, it specifies the full direction. - Else, theta and phi specify the direction as angles. If - lonlat = True, theta and phi are longitude and latitude in - degrees; else, theta and phi are co-latitude and longitude in - radians. - - Parameters - ---------- - base : HealpixBase - HEALPix grid used to discretize exposure - theta : np.ndarray or SkyCoord - if phi is None, a vector SkyCoord - if phi is not none, a vector of angles - colat: np.ndarray, optional - a vector of angles - interp : bool, optional - If True, interpolate the weights onto the HEALPix grid; - else, just map to nearest bin. (Default: interpolate) - - Returns - ------- - pixels : np.ndarray (int) - all HEALPix pixels in the grid with nonzero exposure time - exposures: np.ndarray (float) - exposure time for each pixel - - """ - - duration = self._raw_time_delta - - if len(duration) + 1 != len(theta): - raise ValueError("Source path must have length equal to # times in SpacecraftFile") - - # remove the last src location. 
Effectively a 0th-order - # interpolation - theta = theta[:-1] - if phi is not None: - phi = phi[:-1] - - if interp: - pixels, weights = base.get_interp_weights(theta=theta, - phi=phi, - lonlat=lonlat) - weighted_duration = weights * duration[None] - else: - # do not interpolate - pixels = base.ang2pix(theta=theta, - phi=phi, - lonlat=lonlat) - weighted_duration = duration - - unique_pixels, unique_weights = \ - self._sparse_sum_duplicates(pixels.ravel(), - weighted_duration.ravel(), - dtype=np.float32) - - return unique_pixels, unique_weights - - def get_dwell_map(self, base, src_path, interp = True): - - """ - Generate a dwell-time map from a source's time-weighted - path in local coordinates. Interpolate the path's time - weights onto the HEALPix grid defined by an instrument - response's NuLambda axis. - - Parameters - ---------- - base : HealpixBase - Definition of HEALPix grid for map - src_path : astropy.coordinates.SkyCoord - Movement of source in detector frame - interp : bool, optional - If True, interpolate the weights onto the HEALPix grid; - else, just map to nearest bin. (Default: interpolate) - - Returns - ------- - mhealpy.containers.healpix_map.HealpixMap - The dwell time map. - - """ - - # check if the target source path is astropy.Skycoord object - if type(src_path) != SkyCoord: - raise TypeError("The coordinates of the source movement in " - "the Spacecraft frame must be a SkyCoord object") - - pixels, weights = self.get_exposure(base, src_path, interp=interp) - - dwell_map = HealpixMap(base = base, - unit = u.second, - coordsys = SpacecraftFrame()) - - map_data = dwell_map.data - map_data[pixels] = weights - - return dwell_map - - - def get_scatt_map(self, - nside, - target_coord = None, - earth_occ = True, - angle_nbins = None): - - """ - Bin the spacecraft attitude history into a list of discretized - attitudes with associated time weights. 
Discretization is - performed on the rotation-vector representation of the - attitude; the supplied nside parameter describes a HEALPix - grid that discretizes the rotvec's direction, while a multiple - of nside defines the number of bins to discretize its angle. - - If a target coordinate is provided and earth_occ is True, - attitudes for which the view of the target is occluded by - the earth are excluded. - - Parameters - ---------- - nside : int - The nside of the scatt map. - target_coord : astropy.coordinates.SkyCoord, optional - The coordinates of the target object. - earth_occ : bool, optional - Option to include Earth occultation in scatt map calculation. - Default is True. - angle_nbins : int (optional) - Number of bins used for the rotvec's angle. If none - specified, default is 8*nside - - Returns - ------- - cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap - The spacecraft attitude map. - - """ - - source = target_coord - - if earth_occ: - - # earth radius - r_earth = 6378.0 - - # Need a source location to compute earth occultation - if source is None: - raise ValueError("target_coord is needed when earth_occ is True") - - # calculate angle between source direction and Earth zenith - # for each time stamp - src_angle = source.separation(self.earth_zenith) - - # get max angle based on altitude - max_angle = np.pi - np.arcsin(r_earth/(r_earth + self._altitude)) - - # get pointings that are occluded by Earth - is_occluded = src_angle.rad >= max_angle - - # zero out weights of time bins corresponding to occluded pointings - time_weights = np.where(is_occluded[:-1], 0, self.livetime) - - else: - source = None # w/o occultation, result is not dependent on source - time_weights = self.livetime - - # Get orientations as rotation vectors (center dir, angle around center) - - rot_vecs = self._attitude[:-1].as_rotvec() - rot_angles = np.linalg.norm(rot_vecs, axis=-1) - rot_dirs = rot_vecs / rot_angles[:,None] - - # discretize rotvecs for input Attitudes - - 
dir_axis = HealpixAxis(nside=nside, coordsys=self.frame) - - if angle_nbins is None: - angle_nbins = 8*nside - - angle_axis = Axis(np.linspace(0., 2*np.pi, num=angle_nbins+1), unit=u.rad) - - r_lon, r_colat = self._cart_to_polar(rot_dirs.value) - - dir_bins = dir_axis.find_bin(theta=r_colat, - phi=r_lon) - angle_bins = angle_axis.find_bin(rot_angles) - - # compute list of unique rotvec bins occurring in input, - # along with mapping from time to rotvec bin - shape = (dir_axis.nbins, angle_axis.nbins) - - att_bins = np.ravel_multi_index((dir_bins, angle_bins), - shape) - - # compute an Attitude for each unique rotvec bin - - unique_atts, time_to_att_map = np.unique(att_bins, - return_inverse=True) - (unique_dirs, unique_angles) = np.unravel_index(unique_atts, - shape) - v = dir_axis.pix2vec(unique_dirs) - - binned_attitudes = Attitude.from_rotvec(np.column_stack(v) * - angle_axis.centers[unique_angles][:,None], - frame = self.frame) - - # sum weights for all attitudes mapping to each bin - binned_weights = np.zeros(len(unique_atts)) - np.add.at(binned_weights, time_to_att_map, time_weights) - - # remove any attitudes with zero weight - binned_attitudes = binned_attitudes[binned_weights > 0] - binned_weights = binned_weights[binned_weights > 0] - - return SpacecraftAttitudeMap(binned_attitudes, - u.Quantity(binned_weights, unit=u.s, copy=False), - source = source) - - - def get_psr_rsp(self, response_file, dwell_map, dts = None, pa_convention = None): - - """ - Generates the point source response based on the response file and dwell time map. - dts is used to find the exposure time for this observation. - - Parameters - ---------- - :response_file : str or pathlib.Path - The response file for the observation - dwell_map : HealpixMap object or str.pathlib.Path - The time dwell map for the source, or the name of a file - from which to load it - dts : numpy.ndarray, optional - The elapsed time for each pointing. It must has the same size - as the pointings. 
If you have saved this array, you can pass - it using this parameter (the defaul is `None`, which implies - that the `dts` will be read from the instance). - pa_convention : str, optional - Polarization convention of response ('RelativeX', - 'RelativeY', or 'RelativeZ') - - Returns - ------- - Ei_edges : numpy.ndarray - The edges of the incident energy. - Ei_lo : numpy.ndarray - The lower edges of the incident energy. - Ei_hi : numpy.ndarray - The upper edges of the incident energy. - Em_edges : numpy.ndarray - The edges of the measured energy. - Em_lo : numpy.ndarray - The lower edges of the measured energy. - Em_hi : numpy.ndarray - The upper edges of the measured energy. - areas : numpy.ndarray - The effective area of each energy bin. - matrix : numpy.ndarray - The energy dispersion matrix. - pa_convention : str, optional - Polarization convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') - - """ - - if isinstance(dwell_map, (str, pathlib.Path)): - dwell_map = HealpixMap.read_map(dwell_map) - - if dts is None: - dts = self.get_time_delta() - else: - dts = TimeDelta(dts*u.second) - - with FullDetectorResponse.open(response_file, pa_convention=pa_convention) as response: - - # get point source response - psr = response.get_point_source_response(dwell_map) - - Ei_edges = np.array(response.axes['Ei'].edges) - self.Ei_lo = np.float32(Ei_edges[:-1]) # use float32 to match the requirement of the data type - self.Ei_hi = np.float32(Ei_edges[1:]) - - Em_edges = np.array(response.axes['Em'].edges) - self.Em_lo = np.float32(Em_edges[:-1]) - self.Em_hi = np.float32(Em_edges[1:]) - - # get the effective area and matrix - logger.info("Getting the effective area ...") - self.areas = np.float32(np.array(psr.project('Ei').to_dense().contents))/dts.to_value(u.second).sum() - spectral_response = np.float32(np.array(psr.project(['Ei','Em']).to_dense().contents)) - self.matrix = np.float32(np.zeros((self.Ei_lo.size, self.Em_lo.size))) # initialize matrix - - 
logger.info("Getting the energy redistribution matrix ...") - for i in range(self.Ei_lo.size): - new_raw = spectral_response[i,:] / spectral_response[i,:].sum() - self.matrix[i,:] = new_raw - self.matrix = self.matrix.T - - return Ei_edges, self.Ei_lo, self.Ei_hi, Em_edges, self.Em_lo, self.Em_hi, self.areas, self.matrix - - - def get_arf(self, out_name): - - """ - Converts the point source response to an arf file that can be read by XSPEC. - - Parameters - ---------- - out_name: str - The name of the arf file to save. - - """ - - self.out_name = out_name - - # blow write the arf file - copyright_string=" FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " - - ## Create PrimaryHDU - primaryhdu = fits.PrimaryHDU() # create an empty primary HDU - primaryhdu.header["BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value - primaryhdu.header["COMMENT"] = copyright_string # add comments - primaryhdu.header # print headers and their values - - col1_energ_lo = fits.Column(name="ENERG_LO", format="E", unit = "keV", array=self.Em_lo) - col2_energ_hi = fits.Column(name="ENERG_HI", format="E", unit = "keV", array=self.Em_hi) - col3_specresp = fits.Column(name="SPECRESP", format="E", unit = "cm**2", array=self.areas) - cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, col3_specresp]) # create a ColDefs (column-definitions) object for all columns - specresp_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object - - specresp_bintablehdu.header.comments["TTYPE1"] = "label for field 1" - specresp_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" - specresp_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" - specresp_bintablehdu.header.comments["TTYPE2"] = "label for field 2" - specresp_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" - 
specresp_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" - specresp_bintablehdu.header.comments["TTYPE3"] = "label for field 3" - specresp_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" - specresp_bintablehdu.header.comments["TUNIT3"] = "physical unit of field" - - specresp_bintablehdu.header["EXTNAME"] = ("SPECRESP","name of this binary table extension") - specresp_bintablehdu.header["TELESCOP"] = ("COSI","mission/satellite name") - specresp_bintablehdu.header["INSTRUME"] = ("COSI","instrument/detector name") - specresp_bintablehdu.header["FILTER"] = ("NONE","filter in use") - specresp_bintablehdu.header["HDUCLAS1"] = ("RESPONSE","dataset relates to spectral response") - specresp_bintablehdu.header["HDUCLAS2"] = ("SPECRESP","extension contains an ARF") - specresp_bintablehdu.header["HDUVERS"] = ("1.1.0","version of format") - - new_arfhdus = fits.HDUList([primaryhdu, specresp_bintablehdu]) - new_arfhdus.writeto(f'{out_name}.arf', overwrite=True) - - - def get_rmf(self, out_name): - - """ - Converts the point source response to an rmf file that can be read by XSPEC. - - Parameters - ---------- - out_name: str - The name of the arf file to save. 
- """ - - self.out_name = out_name - - # blow write the arf file - copyright_string=" FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " - - ## Create PrimaryHDU - primaryhdu = fits.PrimaryHDU() # create an empty primary HDU - primaryhdu.header["BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value - primaryhdu.header["COMMENT"] = copyright_string # add comments - primaryhdu.header # print headers and their values - - ## Create binary table HDU for MATRIX - ### prepare colums - energ_lo = [] - energ_hi = [] - n_grp = [] - f_chan = [] - n_chan = [] - matrix = [] - for i in range(len(self.Ei_lo)): - energ_lo_temp = np.float32(self.Em_lo[i]) - energ_hi_temp = np.float32(self.Ei_hi[i]) - - if self.matrix[:,i].sum() != 0: - nz_matrix_idx = np.nonzero(self.matrix[:,i])[0] # non-zero index for the matrix - subsets = np.split(nz_matrix_idx, np.where(np.diff(nz_matrix_idx) != 1)[0]+1) - n_grp_temp = np.int16(len(subsets)) - f_chan_temp = [] - n_chan_temp = [] - matrix_temp = [] - for m in range(n_grp_temp): - f_chan_temp += [subsets[m][0]] - n_chan_temp += [len(subsets[m])] - for m in nz_matrix_idx: - matrix_temp += [self.matrix[:,i][m]] - f_chan_temp = np.int16(np.array(f_chan_temp)) - n_chan_temp = np.int16(np.array(n_chan_temp)) - matrix_temp = np.float32(np.array(matrix_temp)) - else: - n_grp_temp = np.int16(0) - f_chan_temp = np.int16(np.array([0])) - n_chan_temp = np.int16(np.array([0])) - matrix_temp = np.float32(np.array([0])) - - energ_lo.append(energ_lo_temp) - energ_hi.append(energ_hi_temp) - n_grp.append(n_grp_temp) - f_chan.append(f_chan_temp) - n_chan.append(n_chan_temp) - matrix.append(matrix_temp) - - col1_energ_lo = fits.Column(name="ENERG_LO", format="E",unit = "keV", array=energ_lo) - col2_energ_hi = fits.Column(name="ENERG_HI", format="E",unit = "keV", array=energ_hi) - col3_n_grp = fits.Column(name="N_GRP", 
format="I", array=n_grp) - col4_f_chan = fits.Column(name="F_CHAN", format="PI(54)", array=f_chan) - col5_n_chan = fits.Column(name="N_CHAN", format="PI(54)", array=n_chan) - col6_n_chan = fits.Column(name="MATRIX", format="PE(161)", array=matrix) - cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, col3_n_grp, col4_f_chan, col5_n_chan, col6_n_chan]) # create a ColDefs (column-definitions) object for all columns - matrix_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object - - matrix_bintablehdu.header.comments["TTYPE1"] = "label for field 1 " - matrix_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" - matrix_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" - matrix_bintablehdu.header.comments["TTYPE2"] = "label for field 2" - matrix_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" - matrix_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" - matrix_bintablehdu.header.comments["TTYPE3"] = "label for field 3 " - matrix_bintablehdu.header.comments["TFORM3"] = "data format of field: 2-byte INTEGER" - matrix_bintablehdu.header.comments["TTYPE4"] = "label for field 4" - matrix_bintablehdu.header.comments["TFORM4"] = "data format of field: variable length array" - matrix_bintablehdu.header.comments["TTYPE5"] = "label for field 5" - matrix_bintablehdu.header.comments["TFORM5"] = "data format of field: variable length array" - matrix_bintablehdu.header.comments["TTYPE6"] = "label for field 6" - matrix_bintablehdu.header.comments["TFORM6"] = "data format of field: variable length array" - - matrix_bintablehdu.header["EXTNAME" ] = ("MATRIX","name of this binary table extension") - matrix_bintablehdu.header["TELESCOP"] = ("COSI","mission/satellite name") - matrix_bintablehdu.header["INSTRUME"] = ("COSI","instrument/detector name") - matrix_bintablehdu.header["FILTER"] = ("NONE","filter in use") - matrix_bintablehdu.header["CHANTYPE"] = ("PI","total number of 
detector channels") - matrix_bintablehdu.header["DETCHANS"] = (len(self.Em_lo),"total number of detector channels") - matrix_bintablehdu.header["HDUCLASS"] = ("OGIP","format conforms to OGIP standard") - matrix_bintablehdu.header["HDUCLAS1"] = ("RESPONSE","dataset relates to spectral response") - matrix_bintablehdu.header["HDUCLAS2"] = ("RSP_MATRIX","dataset is a spectral response matrix") - matrix_bintablehdu.header["HDUVERS"] = ("1.3.0","version of format") - matrix_bintablehdu.header["TLMIN4"] = (0,"minimum value legally allowed in column 4") - - ## Create binary table HDU for EBOUNDS - channels = np.arange(len(self.Em_lo), dtype=np.int16) - e_min = np.float32(self.Em_lo) - e_max = np.float32(self.Em_hi) - - col1_channels = fits.Column(name="CHANNEL", format="I", array=channels) - col2_e_min = fits.Column(name="E_MIN", format="E",unit="keV", array=e_min) - col3_e_max = fits.Column(name="E_MAX", format="E",unit="keV", array=e_max) - cols = fits.ColDefs([col1_channels, col2_e_min, col3_e_max]) - ebounds_bintablehdu = fits.BinTableHDU.from_columns(cols) - - ebounds_bintablehdu.header.comments["TTYPE1"] = "label for field 1" - ebounds_bintablehdu.header.comments["TFORM1"] = "data format of field: 2-byte INTEGER" - ebounds_bintablehdu.header.comments["TTYPE2"] = "label for field 2" - ebounds_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" - ebounds_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" - ebounds_bintablehdu.header.comments["TTYPE3"] = "label for field 3" - ebounds_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" - ebounds_bintablehdu.header.comments["TUNIT3"] = "physical unit of field" - - ebounds_bintablehdu.header["EXTNAME"] = ("EBOUNDS","name of this binary table extension") - ebounds_bintablehdu.header["TELESCOP"] = ("COSI","mission/satellite") - ebounds_bintablehdu.header["INSTRUME"] = ("COSI","nstrument/detector name") - ebounds_bintablehdu.header["FILTER"] = ("NONE","filter in 
use") - ebounds_bintablehdu.header["CHANTYPE"] = ("PI","channel type (PHA or PI)") - ebounds_bintablehdu.header["DETCHANS"] = (len(self.Em_lo),"total number of detector channels") - ebounds_bintablehdu.header["HDUCLASS"] = ("OGIP","format conforms to OGIP standard") - ebounds_bintablehdu.header["HDUCLAS1"] = ("RESPONSE","dataset relates to spectral response") - ebounds_bintablehdu.header["HDUCLAS2"] = ("EBOUNDS","dataset is a spectral response matrix") - ebounds_bintablehdu.header["HDUVERS"] = ("1.2.0","version of format") - - new_rmfhdus = fits.HDUList([primaryhdu, matrix_bintablehdu,ebounds_bintablehdu]) - new_rmfhdus.writeto(f'{out_name}.rmf', overwrite=True) - - - def get_pha(self, src_counts, errors, rmf_file = None, arf_file = None, bkg_file = None, exposure_time = None, dts = None, telescope="COSI", instrument="COSI"): - - """ - Generate the pha file that can be read by XSPEC. This file stores the counts info of the source. - - Parameters - ---------- - src_counts : numpy.ndarray - The counts in each energy band. If you have src_counts with unit counts/kev/s, you must convert it to counts by multiplying it with exposure time and the energy band width. - errors : numpy.ndarray - The error for counts. It has the same unit requirement as src_counts. - rmf_file : str, optional - The rmf file name to be written into the pha file (the default is `None`, which implies that it uses the rmf file generate by function `get_rmf`) - arf_file : str, optional - The arf file name to be written into the pha file (the default is `None`, which implies that it uses the arf file generate by function `get_arf`) - bkg_file : str, optional - The background file name (the default is `None`, which implied the `src_counts` is source counts only). - exposure_time : float, optional - The exposure time for this source observation (the default is `None`, which implied that the exposure time will be calculated by `dts`). 
- dts : numpy.ndarray, optional - It's used to calculate the exposure time. It has the same effect as `exposure_time`. If both `exposure_time` and `dts` are given, `dts` will write over the exposure_time (the default is `None`, which implies that the `dts` will be read from the instance). - telescope : str, optional - The name of the telecope (the default is "COSI"). - instrument : str, optional - The instrument name (the default is "COSI"). - """ - - if rmf_file is None: - rmf_file = f'{self.out_name}.rmf' - - if arf_file is None: - arf_file = f'{self.out_name}.arf' - - if dts is not None: - dts = self.__str_or_array(dts) # FIXME: function does not exist??? - exposure_time = dts.sum() - - channel_number = len(src_counts) - - # define other hardcoded inputs - copyright_string=" FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " - channels = np.arange(channel_number) - - # Create PrimaryHDU - primaryhdu = fits.PrimaryHDU() # create an empty primary HDU - primaryhdu.header["BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value - primaryhdu.header["COMMENT"] = copyright_string # add comments - primaryhdu.header["TELESCOP"] = telescope # add telescope keyword valie - primaryhdu.header["INSTRUME"] = instrument # add instrument keyword valie - primaryhdu.header # print headers and their values - - # Create binary table HDU - a1 = np.array(channels,dtype="int32") # I guess I need to convert the dtype to match the format J - a2 = np.array(src_counts,dtype="int64") # int32 is not enough for counts - a3 = np.array(errors,dtype="int64") # int32 is not enough for errors - col1 = fits.Column(name="CHANNEL", format="J", array=a1) - col2 = fits.Column(name="COUNTS", format="K", array=a2,unit="count") - col3 = fits.Column(name="STAT_ERR", format="K", array=a3,unit="count") - cols = fits.ColDefs([col1, col2, col3]) # create a ColDefs 
(column-definitions) object for all columns - bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object - - #add other BinTableHDU hear keywords,their values, and comments - bintablehdu.header.comments["TTYPE1"] = "label for field 1" - bintablehdu.header.comments["TFORM1"] = "data format of field: 32-bit integer" - bintablehdu.header.comments["TTYPE2"] = "label for field 2" - bintablehdu.header.comments["TFORM2"] = "data format of field: 32-bit integer" - bintablehdu.header.comments["TUNIT2"] = "physical unit of field 2" - - - bintablehdu.header["EXTNAME"] = ("SPECTRUM","name of this binary table extension") - bintablehdu.header["TELESCOP"] = (telescope,"telescope/mission name") - bintablehdu.header["INSTRUME"] = (instrument,"instrument/detector name") - bintablehdu.header["FILTER"] = ("NONE","filter type if any") - bintablehdu.header["EXPOSURE"] = (exposure_time,"integration time in seconds") - bintablehdu.header["BACKFILE"] = (bkg_file,"background filename") - bintablehdu.header["BACKSCAL"] = (1,"background scaling factor") - bintablehdu.header["CORRFILE"] = ("NONE","associated correction filename") - bintablehdu.header["CORRSCAL"] = (1,"correction file scaling factor") - bintablehdu.header["CORRSCAL"] = (1,"correction file scaling factor") - bintablehdu.header["RESPFILE"] = (rmf_file,"associated rmf filename") - bintablehdu.header["ANCRFILE"] = (arf_file,"associated arf filename") - bintablehdu.header["AREASCAL"] = (1,"area scaling factor") - bintablehdu.header["STAT_ERR"] = (0,"statistical error specified if any") - bintablehdu.header["SYS_ERR"] = (0,"systematic error specified if any") - bintablehdu.header["GROUPING"] = (0,"grouping of the data has been defined if any") - bintablehdu.header["QUALITY"] = (0,"data quality information specified") - bintablehdu.header["HDUCLASS"] = ("OGIP","format conforms to OGIP standard") - bintablehdu.header["HDUCLAS1"] = ("SPECTRUM","PHA dataset") - bintablehdu.header["HDUVERS"] = ("1.2.1","version 
of format") - bintablehdu.header["POISSERR"] = (False,"Poissonian errors to be assumed, T as True") - bintablehdu.header["CHANTYPE"] = ("PI","channel type (PHA or PI)") - bintablehdu.header["DETCHANS"] = (channel_number,"total number of detector channels") - - new_phahdus = fits.HDUList([primaryhdu, bintablehdu]) - new_phahdus.writeto(f'{self.out_name}.pha', overwrite=True) - - - def plot_arf(self, file_name = None, save_name = None, dpi = 300): - - """ - Read the arf fits file, plot and save it. - - Parameters - ---------- - file_name: str, optional - The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). - save_name: str, optional - The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). - dpi: int, optional - The dpi of the saved image (the default is 300). - """ - - if file_name is None: - file_name = f'{self.out_name}.arf' - - if save_name is None: - save_name = self.out_name - - arf = fits.open(file_name) # read file - - # SPECRESP HDU - self.specresp_hdu = arf["SPECRESP"] - - self.areas = np.array(self.specresp_hdu.data["SPECRESP"]) - self.Em_lo = np.array(self.specresp_hdu.data["ENERG_LO"]) - self.Em_hi = np.array(self.specresp_hdu.data["ENERG_HI"]) - - E_center = (self.Em_lo+self.Em_hi)/2 - E_edges = np.append(self.Em_lo,self.Em_hi[-1]) - - fig, ax = plt.subplots() - ax.hist(E_center,E_edges,weights=self.areas,histtype='step') - - ax.set_title("Effective area") - ax.set_xlabel("Energy[$keV$]") - ax.set_ylabel(r"Effective area [$cm^2$]") - ax.set_xscale("log") - fig.savefig(f"Effective_area_for_{save_name}.png", bbox_inches = "tight", pad_inches=0.1, dpi=dpi) - - - def plot_rmf(self, file_name = None, save_name = None, dpi = 300): - - """ - Read the rmf fits file, plot and save it. 
- - Parameters - ---------- - file_name: str, optional - The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). - save_name: str, optional - The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). - dpi: int, optional - The dpi of the saved image (the default is 300). - """ - - if file_name is None: - file_name = f'{self.out_name}.rmf' - - if save_name is None: - save_name = self.out_name - - # Read rmf file - rmf = fits.open(file_name) # read file - - # Read the ENOUNDS information - ebounds_ext = rmf["EBOUNDS"] - channel_low = ebounds_ext.data["E_MIN"] # energy bin lower edges for channels (channels are just incident energy bins) - channel_high = ebounds_ext.data["E_MAX"] # energy bin higher edges for channels (channels are just incident energy bins) - - # Read the MATRIX extension - matrix_ext = rmf['MATRIX'] - #logger.info(repr(matrix_hdu.header[:60])) - energy_low = matrix_ext.data["ENERG_LO"] # energy bin lower edges for measured energies - energy_high = matrix_ext.data["ENERG_HI"] # energy bin higher edges for measured energies - data = matrix_ext.data - - # Create a 2-d numpy array and store probability data into the redistribution matrix - rmf_matrix = np.zeros((len(energy_low),len(channel_low))) # create an empty matrix - for i in range(data.shape[0]): # i is the measured energy index, examine the matrix_ext.data rows by rows - if data[i][5].sum() == 0: # if the sum of probabilities is zero, then skip since there is no data at all - pass - else: - #measured_energy_index = np.argwhere(energy_low == data[157][0])[0][0] - f_chan = data[i][3] # get the starting channel of each subsets - n_chann = data[i][4] # get the number of channels in each subsets - matrix = data[i][5] # get the probabilities of this row (incident energy) - indices = [] - for k in f_chan: - channels = 0 - channels = np.arange(k,k + 
n_chann[np.argwhere(f_chan == k)][0][0]).tolist() # generate the cha - indices += channels # fappend the channels togeter - indices = np.array(indices) - for m in indices: - rmf_matrix[i][m] = matrix[np.argwhere(indices == m)[0][0]] # write the probabilities into the empty matrix - - - # plot the redistribution matrix - xcenter = np.divide(energy_low+energy_high,2) - x_center_coords = np.repeat(xcenter, 10) - y_center_coords = np.tile(xcenter, 10) - energy_all_edges = np.append(energy_low,energy_high[-1]) - #bin_edges = np.array([incident_energy_bins,incident_energy_bins]) # doesn't work - bin_edges = np.vstack((energy_all_edges, energy_all_edges)) - #logger.info(bin_edges) - - self.probability = [] - for i in range(10): - for j in range(10): - self.probability.append(rmf_matrix[i][j]) - #logger.info(type(probability)) - - plt.hist2d(x=x_center_coords,y=y_center_coords,weights=self.probability,bins=bin_edges, norm=LogNorm()) - plt.xscale('log') - plt.yscale('log') - plt.xlabel("Incident energy [$keV$]") - plt.ylabel("Measured energy [$keV$]") - plt.title("Redistribution matrix") - #plt.xlim([70,10000]) - #plt.ylim([70,10000]) - plt.colorbar(norm=LogNorm()) - plt.savefig(f"Redistribution_matrix_for_{save_name}.png", bbox_inches = "tight", pad_inches=0.1, dpi=dpi) diff --git a/cosipy/spacecraftfile/__init__.py b/cosipy/spacecraftfile/__init__.py index 3aaad3d1..558f75d5 100644 --- a/cosipy/spacecraftfile/__init__.py +++ b/cosipy/spacecraftfile/__init__.py @@ -1,2 +1,2 @@ -from .SpacecraftFile import SpacecraftFile +from .spacecraft_file import * from .scatt_map import SpacecraftAttitudeMap, SpacecraftAxisMap diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py new file mode 100644 index 00000000..71cda049 --- /dev/null +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -0,0 +1,738 @@ +from pathlib import Path + +import numpy as np + +import astropy.units as u +import astropy.constants as c + +from astropy.time import Time 
+from astropy.coordinates import SkyCoord, EarthLocation, GCRS, SphericalRepresentation, CartesianRepresentation, \ + UnitSphericalRepresentation +from astropy.units import Quantity +from mhealpy import HealpixBase +from histpy import Histogram, TimeAxis, HealpixAxis, Axis +from mhealpy import HealpixMap + +from scoords import Attitude, SpacecraftFrame + +import pandas as pd + +from .scatt_map import SpacecraftAttitudeMap + +from typing import Union, Optional + +import logging +logger = logging.getLogger(__name__) + +__all__ = ["SpacecraftHistory"] + +class SpacecraftHistory: + + def __init__(self, + obstime: Time, + attitude: Attitude, + location: GCRS, + livetime: u.Quantity = None): + """ + Handles the spacecraft orientation. Calculates the dwell time + map and point source response over a certain orientation + period. + + Parameters + ---------- + obstime: + The obstime stamps for each pointing. Note this is NOT the obstime duration, see "livetime". + attitude: + Spacecraft orientation with respect to an inertial system. + location: + Location of the spacecraft at each timestamp in Earth-centered inertial (ECI) coordinates. + livetime: + Time the instrument was live for the corresponding + obstime bin. Should have one less element than the number of + timestamps. If not provided, it will assume that the instrument + was fully on without interruptions.
+ """ + + time_axis = TimeAxis(obstime, copy = False, label= 'obstime') + + if livetime is None: + livetime = time_axis.widths.to(u.s) + + self._hist = Histogram(time_axis, livetime, copy_contents = False) + + if not (location.shape == () or location.shape == obstime.shape): + raise ValueError(f"'location' must be a scalar or have the same length as the timestamps ({obstime.shape}), but it has shape ({location.shape})") + + if not (attitude.shape == () or attitude.shape == obstime.shape): + raise ValueError(f"'attitude' must be a scalar or have the same length as the timestamps ({obstime.shape}), but it has shape ({attitude.shape})") + + self._attitude = attitude + + self._gcrs = location + + @property + def nintervals(self): + return self._hist.nbins + + @property + def intervals_duration(self): + return self._hist.axis.widths.to(self._hist.unit) + + @property + def intervals_tstart(self): + return self._hist.axis.lower_bounds + + @property + def intervals_tstop(self): + return self._hist.axis.upper_bounds + + @property + def tstart(self): + return self._hist.axis.lo_lim + + @property + def tstop(self): + return self._hist.axis.hi_lim + + @property + def npoints(self): + return self._hist.nbins + 1 + + @property + def obstime(self): + return self._hist.axis.edges + + @property + def livetime(self): + return self._hist.contents + + @property + def attitude(self): + return self._attitude + + @property + def location(self)->GCRS: + return self._gcrs + + @property + def earth_zenith(self) -> SkyCoord: + """ + Pointing of the Earth's zenith at the location of the SC + """ + gcrs_sph = self._gcrs.represent_as(SphericalRepresentation) + return SkyCoord(ra=gcrs_sph.lon, dec=gcrs_sph.lat, frame='icrs', copy=False) + + @classmethod + def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftHistory": + + """ + Parses timestamps, axis positions from file and returns to __init__. + + Parameters + ---------- + file : str + The file path of the pointings. 
+ tstart: + Start reading the file from an interval *including* this time. Use select_interval() to + cut the SC file at exactly this time. + tstop: + Stop reading the file at an interval *including* this time. Use select_interval() to + cut the SC file at exactly this time. + + Returns + ------- + cosipy.spacecraftfile.spacecraft_file + The SpacecraftHistory object. + """ + + file = Path(file) + + if file.suffix == ".ori": + return cls._parse_from_file(file, tstart, tstop) + else: + raise ValueError(f"File format for {file} not supported") + + @classmethod + def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftHistory": + """ + Parses an .ori txt file with MEGAlib formatting. + + # Columns + # 0: Always "OG" (orbital geometry) + # 1: obstime: timestamp in unix seconds + # 2: lat_x: galactic latitude of SC x-axis (deg) + # 3: lon_x: galactic longitude of SC x-axis (deg) + # 4: lat_z: galactic latitude of SC z-axis (deg) + # 5: lon_z: galactic longitude of SC z-axis (deg) + # 6: altitude: altitude above the Earth's ellipsoid (km) + # 7: Earth_lat: galactic latitude of the direction the Earth's zenith is pointing to at the SC location (deg) + # 8: Earth_lon: galactic longitude of the direction the Earth's zenith is pointing to at the SC location (deg) + # 9: livetime (previously called SAA): accumulated uptime up to the following entry (seconds) + + Parameters + ---------- + file: + Path to .ori file + + Returns + ------- + cosipy.spacecraftfile.spacecraft_file + The SpacecraftHistory object. + """ + + # First and last line are read only by MEGAlib e.g. + # Type OrientationsGalactic + # ... + # EN + # Using [:-1] instead of skipfooter=1 because otherwise it's slow and you get + # ParserWarning: Falling back to the 'python' engine because the 'c' engine does not support skipfooter; you can avoid this warning by specifying engine='python'.
+ + time, lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = pd.read_csv(file, sep="\s+", skiprows=1, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), header = None, comment = '#', ).values[:-1].transpose() + + time = Time(time, format="unix") + + if tstart is not None or tstop is not None: + # Cut early to skip some conversions later on + + start_idx = 0 + stop_idx = time.size + + time_axis = TimeAxis(time, copy=False) + + if tstart is not None: + start_idx = time_axis.find_bin(tstart) + + if tstop is not None: + stop_idx = time_axis.find_bin(tstop) + 2 + + time = time[start_idx:stop_idx] + lat_x = lat_x[start_idx:stop_idx] + lon_x = lon_x[start_idx:stop_idx] + lat_z = lat_z[start_idx:stop_idx] + lon_z = lon_z[start_idx:stop_idx] + altitude = altitude[start_idx:stop_idx] + earth_lat = earth_lat[start_idx:stop_idx] + earth_lon = earth_lon[start_idx:stop_idx] + livetime = livetime[start_idx:stop_idx] + + xpointings = SkyCoord(l=lon_x * u.deg, b=lat_x * u.deg, frame="galactic") + zpointings = SkyCoord(l=lon_z * u.deg, b=lat_z * u.deg, frame="galactic") + + attitude = Attitude.from_axes(x=xpointings, z=zpointings, frame = 'galactic') + + livetime = livetime[:-1]*u.s # The last element is 0. + + # Currently, the orbit information is in a weird format. + # The altitude is specified with respect to the Earth's surface, like + # you would specify it in a geodetic format, while + # the lon/lat is specified in J2000, like you would in ECI. + # Eventually everything should be in ECI (GCRS in astropy + # for all practical purposes), but for now let's do the conversion. + # 1. Get the direction in galactic + # 2. Transform to GCRS, which uses RA/Dec (ICRS-like). + # This is represented in the unit sphere + # 3. Add the altitude by transforming to EarthLocation. + # Should take care of the non-spherical Earth + # 4. 
Go back GCRS, now with the correct distance + # (from the Earth's center) + zenith_gal = SkyCoord(l=earth_lon * u.deg, b=earth_lat * u.deg, frame="galactic", copy = False) + gcrs = zenith_gal.transform_to('gcrs') + earth_loc = EarthLocation.from_geodetic(lon=gcrs.ra, lat=gcrs.dec, height=altitude*u.km) + gcrs2 = GCRS(ra=gcrs.ra, dec=gcrs.dec, distance=earth_loc.itrs.cartesian.norm(), copy=False) + + return cls(time, attitude, gcrs2, livetime) + + @staticmethod + def _interp_location(t, d1, d2): + """ + Compute a direction that linearly interpolates between + directions d1 and d2 using SLERP. + + The two directions are assumed to have the same frame, + which is also used for the interpolated result. + + Parameters + ---------- + t : float in [0, 1] + interpolation fraction + d1 : GCRS + 1st direction + d2 : GCRS + 2nd direction + + Returns + ------- + SkyCoord: interpolated direction + + """ + + if np.all(d1 == d2): + return d1 + + v1 = d1.cartesian.xyz.value + v2 = d2.cartesian.xyz.value + unit = d1.cartesian.xyz.unit + + # angle between v1, v2 + theta = np.arccos(np.einsum('i...,i...->...',v1, v2)/d1.spherical.distance.value/d2.spherical.distance.value) + + # SLERP interpolated vector + den = np.sin(theta) + vi = (np.sin((1 - t) * theta) * v1 + np.sin(t * theta) * v2) / den + + dvi = GCRS(*Quantity(vi, unit = unit, copy = False), representation_type='cartesian') + + return dvi + + @staticmethod + def _interp_attitude(t, att1, att2): + """ + Compute an Attitude that linearly interpolates between + att1 and att2 using SLERP on their quaternion + representations. + + The two Attitudes are assumed to have the same frame, + which is also used for the interpolated result. 
+ + Parameters + ---------- + t : float in [0, 1] + interpolation fraction + att1 : Attitude + att2 : Attitude + + Returns + ------- + Attitude : interpolated attitude + + """ + + if att1 == att2: + return att1 + + p1 = att1.as_quat() + p2 = att2.as_quat() + + # angle between quaternions p1, p2 (xyzw order) + theta = 2 * np.arccos(np.einsum('i...,i...->...',p1.transpose(), p2.transpose())) + + # Makes it work with scalars or any input shape + t = t[..., np.newaxis] + theta = theta[..., np.newaxis] + + # SLERP interpolated quaternion + den = np.sin(theta) + pi = (np.sin((1 - t) * theta) * p1 + np.sin(t * theta) * p2) / den + + return Attitude.from_quat(pi, frame=att1.frame) + + def interp_attitude(self, time) -> Attitude: + """ + + Returns + ------- + + """ + + points, weights = self.interp_weights(time) + + return self.__class__._interp_attitude(weights[1], self._attitude[points[0]], self._attitude[points[1]]) + + def interp_location(self, time) -> GCRS: + """ + + Returns + ------- + """ + + points, weights = self.interp_weights(time) + + return self.__class__._interp_location(weights[1], self._gcrs[points[0]], self._gcrs[points[1]]) + + def _cumulative_livetime(self, points, weights) -> u.Quantity: + + cum_livetime_discrete = np.append(0 * self._hist.unit, np.cumsum(self.livetime)) + + up_to_tstart = cum_livetime_discrete[points[0]] + + within_bin = self.livetime[points[0]] * weights[1] + + cum_livetime = up_to_tstart + within_bin + + return cum_livetime + + def cumulative_livetime(self, time: Optional[Time] = None) -> u.Quantity: + """ + Get the cumulative live time up to the given time. + + The live time in between the internal timestamps is + assumed constant. + + All timestamps by default. + + Parameters + ---------- + time: + Timestamps + + Returns + ------- + Cumulative live time, with units.
+ """ + + if time is None: + # All + return np.sum(self.livetime) + + points, weights = self.interp_weights(time) + + return self._cumulative_livetime(points, weights) + + def interp_weights(self, times: Time): + return self._hist.axis.interp_weights_edges(times) + + def interp(self, times: Time) -> 'SpacecraftHistory': + + """ + Linearly interpolates attitude and position at a given obstime + + Parameters + ---------- + times: + Timestamps to interpolate + + Returns + ------- + A new SpacecraftHistory object interpolated at these location + """ + + if times.size < 2: + raise ValueError("We need at least two obstime stamps. See also interp_attitude and inter_location") + + points, weights = self.interp_weights(times) + + interp_attitude = self._interp_attitude(weights[1], self._attitude[points[0]], self._attitude[points[1]]) + interp_location = self._interp_location(weights[1], self._gcrs[points[0]], self._gcrs[points[1]]) + + cum_livetime = self._cumulative_livetime(points, weights) + diff_livetime = cum_livetime[1:] - cum_livetime[:-1] + + return self.__class__(times, interp_attitude, interp_location, diff_livetime) + + def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHistory": + """ + Returns the SpacecraftHistory file class object for the source interval. + + Parameters + ---------- + start : astropy.time.Time + The start obstime of the orientation period. Start of history by default. + stop : astropy.time.Time + The end obstime of the orientation period. End of history by default. 
+ + Returns + ------- + cosipy.spacecraft.SpacecraftHistory + """ + + if start is None: + start = self.tstart + + if stop is None: + stop = self.tstop + + if start < self.tstart or stop > self.tstop: + raise ValueError(f"Input range ({start}-{stop}) is outside the SC history ({self.tstart}-{self.tstop})") + + start_points, start_weights = self.interp_weights(start) + stop_points, stop_weights = self.interp_weights(stop) + + # Center values + new_obstime = self.obstime[start_points[1]:stop_points[1]] + new_attitude = self._attitude.as_matrix()[start_points[1]:stop_points[1]] + new_location = self._gcrs[start_points[1]:stop_points[1]].cartesian.xyz + new_livetime = self.livetime[start_points[1]:stop_points[0]] + + # Left edge + # new_obstime.size can be zero if the requested interval fell completely + # an existing interval + if new_obstime.size == 0 or new_obstime[0] != start: + # Left edge might be included already + + new_obstime = Time(np.append(start.jd1, new_obstime.jd1), + np.append(start.jd2, new_obstime.jd2), + format = 'jd') + + start_attitude = self._interp_attitude(start_weights[1], self._attitude[start_points[0]], self._attitude[start_points[1]]) + new_attitude = np.append(start_attitude.as_matrix()[None], new_attitude, axis=0) + + start_location = self._interp_location(start_weights[1], self._gcrs[start_points[0]], self._gcrs[start_points[1]])[None].cartesian.xyz + new_location = np.append(start_location, new_location, axis = 1) + + first_livetime = self.livetime[start_points[0]] * start_weights[0] + new_livetime = np.append(first_livetime, new_livetime) + + # Right edge + # It's never included, since stop <= self.obstime[stop_points[1]], and the + # selection above excludes stop_points[1] + new_obstime = Time(np.append(new_obstime.jd1, stop.jd1), + np.append(new_obstime.jd2, stop.jd2), + format='jd') + + stop_attitude = self._interp_attitude(stop_weights[1], self._attitude[stop_points[0]], self._attitude[stop_points[1]]) + new_attitude = 
np.append(new_attitude, stop_attitude.as_matrix()[None], axis=0) + new_attitude = Attitude.from_matrix(new_attitude, frame=self._attitude.frame) + + stop_location = self._interp_location(stop_weights[1], self._gcrs[stop_points[0]], self._gcrs[stop_points[1]])[None].cartesian.xyz + new_location = np.append(new_location, stop_location, axis=1) + + new_location = GCRS(x = new_location[0], y = new_location[1], z = new_location[2], + representation_type='cartesian') + + if np.all(start_points == stop_points): + # This can only happen if the requested interval fell completely + # an existing interval + new_livetime[0] -= self.livetime[stop_points[0]]*stop_weights[0] + else: + last_livetime = self.livetime[stop_points[0]]*stop_weights[1] + new_livetime = np.append(new_livetime, last_livetime) + + # We used the internal jd1 and jd2 values, which might have changed the format. + # Bring it back + new_obstime.format = self.obstime.format + + return self.__class__(new_obstime, new_attitude, new_location, new_livetime) + + + def get_target_in_sc_frame(self, target_coord: SkyCoord) -> SkyCoord: + + """ + Get the location in spacecraft coordinates for a given target + in inertial coordinates. + + Parameters + ---------- + target_coord : astropy.coordinates.SkyCoord + The coordinates of the target object. + + Returns + ------- + astropy.coordinates.SkyCoord + The target coordinates in the spacecraft frame. + """ + + logger.info("Now converting to the Spacecraft frame...") + + src_path = SkyCoord(np.dot(self.attitude.rot.inv().as_matrix(), target_coord.cartesian.xyz.value), + representation_type = 'cartesian', + frame = SpacecraftFrame()) + + src_path.representation_type = 'spherical' + + return src_path + + def get_dwell_map(self, target_coord:SkyCoord, nside:int = None, scheme = 'ring', base:HealpixBase = None) -> HealpixMap: + + """ + Generates the dwell obstime map for the source. 
+ + Parameters + ---------- + target_coord: + Source coordinate + nside: + Healpix NSIDE + scheme: + Healpix pixel ordering scheme + base: + HealpixBase defining the grid. Alternative to nside & scheme. + + Returns + ------- + mhealpy.containers.healpix_map.HealpixMap + The dwell obstime map. + """ + + # Get source path + src_path_skycoord = self.get_target_in_sc_frame(target_coord) + + # Empty map + dwell_map = HealpixMap(nside = nside, + scheme = scheme, + base = base, + coordsys = SpacecraftFrame()) + + # Fill + # Get the unique pixels to weight, and sum all the correspondint weights first, so + # each pixels needs to be called only once. + # Based on https://stackoverflow.com/questions/23268605/grouping-indices-of-unique-elements-in-numpy + + # remove the last value. Effectively a 0th order interpolations + pixels, weights = dwell_map.get_interp_weights(theta=src_path_skycoord[:-1]) + + weighted_duration = weights * self.livetime.to_value(u.second)[None] + + pixels = pixels.flatten() + weighted_duration = weighted_duration.flatten() + + pixels_argsort = np.argsort(pixels) + + pixels = pixels[pixels_argsort] + weighted_duration = weighted_duration[pixels_argsort] + + first_unique = np.concatenate(([True], pixels[1:] != pixels[:-1])) + + pixel_unique = pixels[first_unique] + + splits = np.nonzero(first_unique)[0][1:] + pixel_durations = [np.sum(weighted_duration[start:stop]) for start, stop in + zip(np.append(0, splits), np.append(splits, pixels.size))] + + for pix, dur in zip(pixel_unique, pixel_durations): + dwell_map[pix] += dur + + dwell_map.to(u.second, update=False, copy=False) + + return dwell_map + + def get_scatt_map(self, + nside, + target_coord=None, + earth_occ=True, + angle_nbins=None) -> SpacecraftAttitudeMap: + + """ + Bin the spacecraft attitude history into a list of discretized + attitudes with associated time weights. 
Discretization is + performed on the rotation-vector representation of the + attitude; the supplied nside parameter describes a HEALPix + grid that discretizes the rotvec's direction, while a multiple + of nside defines the number of bins to discretize its angle. + + If a target coordinate is provided and earth_occ is True, + attitudes for which the view of the target is occluded by + the earth are excluded. + + Parameters + ---------- + nside : int + The nside of the scatt map. + target_coord : astropy.coordinates.SkyCoord, optional + The coordinates of the target object. + earth_occ : bool, optional + Option to include Earth occultation in scatt map calculation. + Default is True. + angle_nbins : int (optional) + Number of bins used for the rotvec's angle. If none + specified, default is 8*nside + + Returns + ------- + cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap + The spacecraft attitude map. + + """ + + def _cart_to_polar(v): + """ + Convert Cartesian 3D unit direction vectors to polar coordinates. 
+ + Parameters + ---------- + v : np.ndarray(float) [N x 3] + array of N 3D unit vectors + + Returns + ------- + lon, colat : np.ndarray(float) [N] + longitude and co-latitude corresponding to v in radians + + """ + + lon = np.arctan2(v[:, 1], v[:, 0]) + colat = np.arccos(v[:, 2]) + return (lon, colat) + + source = target_coord + + if earth_occ: + + # earth radius + r_earth = 6378.0 + + # Need a source location to compute earth occultation + if source is None: + raise ValueError("target_coord is needed when earth_occ is True") + + # calculate angle between source direction and Earth zenith + # for each time stamp + src_angle = source.separation(self.earth_zenith) + + # get max angle based on altitude + max_angle = np.pi - np.arcsin(r_earth/(r_earth + self.location.spherical.distance.km)) + + # get pointings that are occluded by Earth + is_occluded = src_angle.rad >= max_angle + + # zero out weights of time bins corresponding to occluded pointings + time_weights = np.where(is_occluded[:-1], 0, self.livetime.value) + + else: + source = None # w/o occultation, result is not dependent on source + time_weights = self.livetime.value + + # Get orientations as rotation vectors (center dir, angle around center) + + rot_vecs = self._attitude[:-1].as_rotvec() + rot_angles = np.linalg.norm(rot_vecs, axis=-1) + rot_dirs = rot_vecs / rot_angles[:,None] + + # discretize rotvecs for input Attitudes + + dir_axis = HealpixAxis(nside=nside, coordsys=self._attitude.frame) + + if angle_nbins is None: + angle_nbins = 8*nside + + angle_axis = Axis(np.linspace(0., 2*np.pi, num=angle_nbins+1), unit=u.rad) + + r_lon, r_colat = _cart_to_polar(rot_dirs.value) + + dir_bins = dir_axis.find_bin(theta=r_colat, + phi=r_lon) + angle_bins = angle_axis.find_bin(rot_angles) + + # compute list of unique rotvec bins occurring in input, + # along with mapping from time to rotvec bin + shape = (dir_axis.nbins, angle_axis.nbins) + + att_bins = np.ravel_multi_index((dir_bins, angle_bins), + shape) + + # 
compute an Attitude for each unique rotvec bin + + unique_atts, time_to_att_map = np.unique(att_bins, + return_inverse=True) + (unique_dirs, unique_angles) = np.unravel_index(unique_atts, + shape) + v = dir_axis.pix2vec(unique_dirs) + + binned_attitudes = Attitude.from_rotvec(np.column_stack(v) * + angle_axis.centers[unique_angles][:,None], + frame = self._attitude.frame) + + # sum weights for all attitudes mapping to each bin + binned_weights = np.zeros(len(unique_atts)) + np.add.at(binned_weights, time_to_att_map, time_weights) + + # remove any attitudes with zero weight + binned_attitudes = binned_attitudes[binned_weights > 0] + binned_weights = binned_weights[binned_weights > 0] + + return SpacecraftAttitudeMap(binned_attitudes, + u.Quantity(binned_weights, unit=self.livetime.unit, copy=False), + source = source) + + + + diff --git a/cosipy/statistics/__init__.py b/cosipy/statistics/__init__.py new file mode 100644 index 00000000..cc6ae409 --- /dev/null +++ b/cosipy/statistics/__init__.py @@ -0,0 +1 @@ +from .likelihood_functions import * \ No newline at end of file diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py new file mode 100644 index 00000000..21b260fc --- /dev/null +++ b/cosipy/statistics/likelihood_functions.py @@ -0,0 +1,122 @@ +import itertools +import logging +import operator + +from cosipy import UnBinnedData +from cosipy.interfaces.expectation_interface import ExpectationInterface, ExpectationDensityInterface +from cosipy.util.iterables import itertools_batched + +logger = logging.getLogger(__name__) + +from cosipy.interfaces import (BinnedLikelihoodInterface, + UnbinnedLikelihoodInterface, + BinnedDataInterface, + BinnedExpectationInterface, + BinnedBackgroundInterface, DataInterface, BackgroundInterface, EventDataInterface, + BackgroundDensityInterface, + ) + +import numpy as np + +__all__ = ['UnbinnedLikelihood', + 'PoissonLikelihood'] + +class UnbinnedLikelihood(UnbinnedLikelihoodInterface): + 
def __init__(self, + expectation:ExpectationDensityInterface, + batch_size:int = 100000): + """ + Will get the number of events from the response and bkg expectation_density iterators + + Parameters + ---------- + response + bkg + """ + + self._expectation = expectation + self._nobservations = None + + self._batch_size = batch_size + + @property + def nobservations(self) -> int: + """ + Calling get_log_like first is faster, since we don't need to loop though the + events + """ + + if self._nobservations is None: + self._nobservations = sum(1 for _ in self._expectation.expectation_density()) + + return self._nobservations + + def get_log_like(self) -> float: + + # Total number of events + ntot = self._expectation.expected_counts() + + # It's faster to compute all log values at once, but requires keeping them in memory + # Doing it by chunk is a compromise. We might need to adjust the chunk_size + # Based on the system + nobservations = 0 + density_log_sum = 0 + + for density_iter_chunk in itertools_batched(self._expectation.expectation_density(), self._batch_size): + + density = np.fromiter(density_iter_chunk, dtype=float) + + if np.any(density == 0): + # np.log(0) = -inf for any event, no need to keep iterationg + return -np.inf + + density_log_sum += np.sum(np.log(density)) + nobservations += density.size + + self._nobservations = nobservations + + # Log L = -Ntot + sum_i (dN/dOmega)_i + # (dN/dOmega)_i is the expectation density, not a derivative + # (dN/dOmega)_i = Ntot*P_i, where P_i is the event probability + log_like = density_log_sum - ntot + + return log_like + + +class PoissonLikelihood(BinnedLikelihoodInterface): + def __init__(self, data:BinnedDataInterface, + response:BinnedExpectationInterface, + bkg:BinnedBackgroundInterface = None): + + self._data = data + self._bkg = bkg + self._response = response + + @property + def has_bkg(self): + return self._bkg is not None + + @property + def nobservations(self) -> int: + return self._data.data.contents.size 
+ + def get_log_like(self) -> float: + + # Compute expectation including background + # If we don't have background, we won't modify the expectation, so + # it's safe to use the internal cache. + expectation = self._response.expectation(self._data.axes, copy = self.has_bkg) + + if self.has_bkg: + # We won't modify the bkg expectation, so it's safe to use the internal cache + expectation += self._bkg.expectation(self._data.axes, copy = False) + + # Get the arrays + expectation = expectation.contents + data = self._data.data.contents + + # Compute the log-likelihood: + log_like = np.nansum(data * np.log(expectation) - expectation) + + return log_like + diff --git a/cosipy/test_data/20280301_first_10sec.ori b/cosipy/test_data/20280301_first_10sec.ori index bd60cfd3..578095c6 100644 --- a/cosipy/test_data/20280301_first_10sec.ori +++ b/cosipy/test_data/20280301_first_10sec.ori @@ -1,13 +1,13 @@ Type OrientationsGalactic -OG 1835478000.0 73.14907746670937 41.85821768724895 16.85092253329064 221.85821768724895 0.0 0.0 0.0 1.0 -OG 1835478001.0 73.09517926980278 41.88225011209611 16.904820730197223 221.8822501120961 0.0 0.0 0.0 1.0 -OG 1835478002.0 73.04128380352786 41.90629597072256 16.95871619647214 221.90629597072257 0.0 0.0 0.0 1.0 -OG 1835478003.0 72.98739108131268 41.93035532675578 17.012608918687327 221.93035532675577 0.0 0.0 0.0 1.0 -OG 1835478004.0 72.9335011165853 41.954428243823145 17.066498883414702 221.95442824382317 0.0 0.0 0.0 1.0 -OG 1835478005.0 72.87961392277379 41.978514785552235 17.120386077226204 221.97851478555222 0.0 0.0 0.0 1.0 -OG 1835478006.0 72.82572951330626 42.002615015570285 17.174270486693747 222.0026150155703 0.0 0.0 0.0 1.0 -OG 1835478007.0 72.77184790161073 42.02672899750497 17.228152098389273 222.02672899750493 0.0 0.0 0.0 1.0 -OG 1835478008.0 72.7179691011153 42.05085679498347 17.282030898884702 222.05085679498347 0.0 0.0 0.0 1.0 -OG 1835478009.0 72.66409312524804 42.07499847163346 17.335906874751963 222.07499847163342 0.0 0.0 0.0 1.0 
-OG 1835478010.0 72.61021998743702 42.09915409108222 17.38978001256298 222.09915409108223 0.0 0.0 0.0 0.0 +OG 1835478000.0 73.14907746670937 41.85821768724895 16.85092253329064 221.85821768724895 550.0 0.0 0.0 1.0 +OG 1835478001.0 73.09517926980278 41.88225011209611 16.904820730197223 221.8822501120961 550.0 0.0 0.062 1.0 +OG 1835478002.0 73.04128380352786 41.90629597072256 16.95871619647214 221.90629597072257 550.0 0.0 0.124 1.0 +OG 1835478003.0 72.98739108131268 41.93035532675578 17.012608918687327 221.93035532675577 550.0 0.0 0.188 1.0 +OG 1835478004.0 72.9335011165853 41.954428243823145 17.066498883414702 221.95442824382317 550.0 0.0 0.250 1.0 +OG 1835478005.0 72.87961392277379 41.978514785552235 17.120386077226204 221.97851478555222 550.0 0.0 0.313 1.0 +OG 1835478006.0 72.82572951330626 42.002615015570285 17.174270486693747 222.0026150155703 550.0 0.0 0.376 1.0 +OG 1835478007.0 72.77184790161073 42.02672899750497 17.228152098389273 222.02672899750493 550.0 0.0 0.439 1.0 +OG 1835478008.0 72.7179691011153 42.05085679498347 17.282030898884702 222.05085679498347 550.0 0.0 0.501 1.0 +OG 1835478009.0 72.66409312524804 42.07499847163346 17.335906874751963 222.07499847163342 550.0 0.0 0.564 1.0 +OG 1835478010.0 72.61021998743702 42.09915409108222 17.38978001256298 222.09915409108223 550.0 0.0 0.627 0.0 EN diff --git a/cosipy/threeml/COSILike.py b/cosipy/threeml/COSILike.py deleted file mode 100644 index b93ac6f1..00000000 --- a/cosipy/threeml/COSILike.py +++ /dev/null @@ -1,352 +0,0 @@ -import numpy as np - -from threeML import PluginPrototype -from astromodels import Parameter - -from cosipy.response import ( - FullDetectorResponse, - ExtendedSourceResponse -) - -import logging -logger = logging.getLogger(__name__) - -class COSILike(PluginPrototype): - """ - COSI 3ML plugin. - - Parameters - ---------- - name : str - Plugin name e.g. "cosi". 
Needs to have a distinct name with - respect to other plugins in the same analysis - dr : str - Path to full detector response - data : histpy.Histogram - Binned data. Note: Eventually this should be a cosipy data - class - bkg : histpy.Histogram - Binned background model. Note: Eventually this should be a - cosipy data class - sc_orientation : cosipy.spacecraftfile.SpacecraftFile - Contains the information of the orientation: timestamps - (astropy.Time) and attitudes (scoord.Attitude) that describe - the spacecraft for the duration of the data included in the - analysis - nuisance_param : astromodels.core.parameter.Parameter, optional - Background parameter - coordsys : str, optional - Coordinate system ('galactic' or 'spacecraftframe') to perform - fit in, which should match coordinate system of data and - background. This only needs to be specified if the binned data - and background do not have a coordinate system attached to - them - precomputed_psr_file : str, optional - Full path to precomputed point source response in Galactic - coordinates - earth_occ : bool, optional - Option to include Earth occultation in fit (default is True). - - """ - def __init__(self, name, dr, data, bkg, sc_orientation, - nuisance_param = None, - coordsys = None, - precomputed_psr_file = None, - earth_occ=True, - **kwargs): - - # create the hash for the nuisance parameters. We have none for now. - self._nuisance_parameters = {} - - # call the prototype constructor. Boilerplate. - super(COSILike, self).__init__(name, self._nuisance_parameters) - - # User inputs needed to compute the likelihood - self._name = name - - self._sc_orientation = sc_orientation - self.earth_occ = earth_occ - - # Full detector response for point sources - self._dr = FullDetectorResponse.open(dr) - - # Precomputed image response for extended - # sources - if precomputed_psr_file is not None: - logger.info("... 
loading the pre-computed image response ...") - self.image_response = ExtendedSourceResponse.open(precomputed_psr_file) - logger.info("--> done") - - if data.is_sparse: - self._data = data.contents.todense() - else: - self._data = data.contents.value - - if bkg.is_sparse: - self._bkg = bkg.contents.todense() - else: - self._bkg = bkg.contents.value - - try: - data_frame = data.axes["PsiChi"].coordsys.name - bkg_frame = bkg.axes["PsiChi"].coordsys.name - if data_frame != bkg_frame: - raise RuntimeError(f"Data is binned in {data_frame}, while background is binned in {bkg_frame}. Coordinates systems must match." ) - else: - self._coordsys = data_frame - except: - if coordsys is None: - raise RuntimeError("There is no coordinate system attached to the binned data. One must be provided by specifiying coordsys='galactic' or 'spacecraftframe'.") - else: - self._coordsys = coordsys - - # Set to fit nuisance parameter if given by user - if nuisance_param is None: - self.set_inner_minimization(False) - elif isinstance(nuisance_param, Parameter): - self.set_inner_minimization(True) - self._bkg_par = nuisance_param - self._bkg_par.free = self._fit_nuisance_params - self._nuisance_parameters[self._bkg_par.name] = self._bkg_par - else: - raise RuntimeError("Nuisance parameter must be astromodels.core.parameter.Parameter object") - - # Temporary fix to only print log-likelihood warning once max per fit - self._printed_warning = False - - self._model = None - - def set_model(self, model): - """ - Set the model to be used in the joint minimization and discard - any cached information from prior models. This function - *must* be called at least once before computing the log - likelihood with get_log_like(), as well as any time the model - structure (as opposed to its free parameters' values) changes. - It is called once, automatically when constructing the - JointLikelihood object. 
- - """ - - point_sources = model.point_sources - extended_sources = model.extended_sources - - # cached per-source information - self._expected_counts = {} # used externally - - # used only for point sources internally - self._source_location = {} - self._psr = {} - - for name in point_sources: - self._source_location[name] = None - self._psr[name] = None - self._expected_counts[name] = None - - for name in extended_sources: - self._expected_counts[name] = None - - self._model = model - - def compute_expectation(self, model): - """ - Compute the total expected counts of the model - - Parameters - ---------- - model : astromodels.core.model.Model - Any model supported by astromodels - - Returns - ------- - signal : total expected counts - """ - - signal = None - - # Get expectation for extended sources - for name, source in model.extended_sources.items(): - # Set spectrum - # Note: the spectral parameters are updated internally by 3ML - # during the likelihood scan. - - # Get expectation using precomputed psr in Galactic coordinates - total_expectation = \ - self.image_response.get_expectation_from_astromodel(source) - - # Save expected counts for each source, - # in order to enable easy plotting after likelihood scan - self._expected_counts[name] = total_expectation.copy() - - # extract expectation from source as raw numpy array - if total_expectation.is_sparse: - total_expectation = total_expectation.contents.todense() - elif total_expectation.unit is not None: - total_expectation = total_expectation.contents.value - else: - total_expectation = total_expectation.contents - - # Add source to signal - if signal is None: - signal = total_expectation - else: - signal += total_expectation - - # Get expectation for point sources - for name, source in model.point_sources.items(): - - # source location changed - if source.position.sky_coord != self._source_location[name]: - logger.info(f"... 
Re-calculating the point source response of {name} ...") - coord = source.position.sky_coord - self._source_location[name] = coord.copy() - - if self._coordsys == 'spacecraftframe': - dwell_time_map = self._get_dwell_time_map(coord) - self._psr[name] = self._dr.get_point_source_response(exposure_map=dwell_time_map) - elif self._coordsys == 'galactic': - scatt_map = self._get_scatt_map(coord) - self._psr[name] = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) - else: - raise RuntimeError("Unknown coordinate system") - - logger.info(f"--> done (source name : {name})") - - # Convolve with spectrum - # See also the Detector Response and Source Injector tutorials - spectrum = source.spectrum.main.shape - total_expectation = self._psr[name].get_expectation(spectrum) - total_expectation.project(('Em', 'Phi', 'PsiChi')) - - # Save expected counts for each source, - # in order to enable easy plotting after likelihood scan - self._expected_counts[name] = total_expectation.copy() - - # extract expectation from source as raw numpy array - if total_expectation.is_sparse: - total_expectation = total_expectation.contents.todense() - elif total_expectation.unit is not None: - total_expectation = total_expectation.contents.value - else: - total_expectation = total_expectation.contents - - # Add source to signal - if signal is None: - signal = total_expectation - else: - signal += total_expectation - - return signal - - - def get_log_like(self): - """ - Calculate the log-likelihood. 
- - Returns - ---------- - log_like : float - Value of the log-likelihood - """ - - if self._model is None: - raise ValueError("Must set model before computing likelihood!") - - signal = self.compute_expectation(self._model) - - if self._fit_nuisance_params: - # Compute expectation including free background parameter - nv = self._nuisance_parameters[self._bkg_par.name].value - expectation = signal + nv * self._bkg - else: - # Compute expectation without background parameter - expectation = signal + self._bkg - - # avoid -infinite log-likelihood (occurs when expected counts - # = 0 but data != 0) - expectation += 1e-12 - - if not self._printed_warning: - # This 1e-12 should be defined as a parameter in the near - # future (HY) - logger.warning("Adding 1e-12 to each bin of the expectation to avoid log-likelihood = -inf.") - self._printed_warning = True - - # Compute the log-likelihood: - log_like = np.nansum(self._data * np.log(expectation) - expectation) - - return log_like - - def inner_fit(self): - """ - Required for 3ML fit. - - In theory, this function is called to optimize the nuisance - parameters given fixed values of the model parameters. But - in fact, it is called on every iteration, so the nuisance - params are treated as "just another parameter" to optimize. - """ - - return self.get_log_like() - - def _get_dwell_time_map(self, coord): - """ - Get the dwell time map of the source in the inertial (spacecraft) - frame. - - Parameters - ---------- - coord : astropy.coordinates.SkyCoord - Coordinates of the target source - - Returns - ------- - dwell_time_map : mhealpy.containers.healpix_map.HealpixMap - Dwell time map - - """ - - src_path = self._sc_orientation.get_target_in_sc_frame(coord) - dwell_time_map = \ - self._sc_orientation.get_dwell_map(base = self._dr, - src_path = src_path) - - return dwell_time_map - - def _get_scatt_map(self, coord): - """ - Get the spacecraft attitude map of the source in the inertial - (spacecraft) frame. 
- - Parameters - ---------- - coord : astropy.coordinates.SkyCoord - The coordinates of the target object. - - Returns - ------- - scatt_map : cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap - - """ - - scatt_map = \ - self._sc_orientation.get_scatt_map(nside = self._dr.nside * 2, - target_coord = coord, - earth_occ = self.earth_occ) - - return scatt_map - - def set_inner_minimization(self, flag: bool): - """ - Turn on the minimization of the internal COSI (nuisance) parameters. - - Parameters - ---------- - flag : bool - Turns on and off the minimization of the internal parameters - """ - - self._fit_nuisance_params: bool = bool(flag) - - for parameter in self._nuisance_parameters: - self._nuisance_parameters[parameter].free = self._fit_nuisance_params diff --git a/cosipy/threeml/__init__.py b/cosipy/threeml/__init__.py index e5bf2d12..a6eaca5e 100644 --- a/cosipy/threeml/__init__.py +++ b/cosipy/threeml/__init__.py @@ -1,2 +1 @@ -from .COSILike import COSILike from .custom_functions import Band_Eflux diff --git a/cosipy/threeml/psr_fixed_ei.py b/cosipy/threeml/psr_fixed_ei.py new file mode 100644 index 00000000..e06af95c --- /dev/null +++ b/cosipy/threeml/psr_fixed_ei.py @@ -0,0 +1,212 @@ +import copy +from typing import Optional, Iterable, Type + +import numpy as np +from astromodels import PointSource +from astropy.coordinates import UnitSphericalRepresentation, CartesianRepresentation +from astropy.units import Quantity +from executing import Source +from histpy import Axis + +from cosipy import SpacecraftHistory +from cosipy.data_io.EmCDSUnbinnedData import EmCDSEventInSCFrame +from cosipy.interfaces import UnbinnedThreeMLSourceResponseInterface, EventInterface +from cosipy.interfaces.data_interface import TimeTagEmCDSEventDataInSCFrameInterface +from cosipy.interfaces.event import EmCDSEventInSCFrameInterface, TimeTagEmCDSEventInSCFrameInterface +from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface 
+from cosipy.response.photon_types import PhotonWithDirectionAndEnergyInSCFrame
+
+from astropy import units as u
+
+class UnbinnedThreeMLPointSourceResponseTrapz(UnbinnedThreeMLSourceResponseInterface):
+
+    def __init__(self,
+                 data: TimeTagEmCDSEventDataInSCFrameInterface,
+                 irf:FarFieldInstrumentResponseFunctionInterface,
+                 sc_history: SpacecraftHistory,
+                 energies:Quantity):
+        """
+        Will integrate the spectrum by evaluating the IRF at fixed Ei positions and using a simple
+        trapezoidal rule
+
+        All IRF queries are cached
+
+        Parameters
+        ----------
+        irf
+        energies: evaluation points
+        """
+
+        # Interface inputs
+        self._source = None
+
+        # Other implementation inputs
+        self._data = data
+        self._irf = irf
+        self._sc_ori = sc_history
+        # Energies will later be used with a PhotonWithEnergyInterface, which uses keV
+        self._energies_keV = energies.to_value(u.keV)
+
+        # This can be computed once and for all
+        # Trapezoidal rule weights to integrate in Ei: w[i] = (e[i-1] + e[i]) / 2
+        ewidths = np.diff(self._energies_keV)
+        self._trapz_weights = np.zeros_like(self._energies_keV)
+        self._trapz_weights[:-1] = ewidths
+        self._trapz_weights[1:] += ewidths
+        self._trapz_weights /= 2
+
+        self._attitude_at_event_times = self._sc_ori.interp_attitude(self._data.time)
+
+        # Caches
+
+        # See this issue for the caveats of comparing models
+        # https://github.com/threeML/threeML/issues/645
+        self._last_convolved_source_dict = None
+
+        # The IRF values change for each direction, but it's the same for all spectrum parameters
+
+        # Source location cached separately since changing the response
+        # for a given direction is expensive
+        self._last_convolved_source_skycoord = None
+
+        # For integral for nevents
+        # int Aeff(t, Ei) F(Ei) dt dEi
+        # Will need to multiply by _trapz_weights*F(Ei) and sum. 
+ # Once per Ei + self._exposure = None # In cm2*s + + # axis 0: events + # axis 1: energy_i samples + self._event_prob_weights = None # in cm2/keV + + # Integrated over Ei + self._nevents = None + self._event_prob = None + + @property + def event_type(self) -> Type[EventInterface]: + return TimeTagEmCDSEventInSCFrameInterface + + def set_source(self, source: Source): + """ + The source is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. + """ + if not isinstance(source, PointSource): + raise TypeError("I only know how to handle point sources!") + + self._source = source + + def clear_cache(self): + + self._last_convolved_source_dict = None + self._last_convolved_source_skycoord = None + self._nevents = None + self._exposure = None + self._event_prob = None + self._event_prob_weights = None + + def copy(self) -> "ThreeMLSourceResponseInterface": + """ + This method is used to re-use the same object for multiple + sources. + It is expected to return a copy of itself, but deepcopying + any necessary information such that when + a new source is set, the expectation calculation + are independent. + + psr1 = ThreeMLSourceResponse() + psr2 = psr.copy() + psr1.set_source(source1) + psr2.set_source(source2) + """ + + new = copy.copy(self) + new.clear_cache() + return new + + def _update_cache(self): + """ + Performs all calculation as needed depending on the current source location + + Returns + ------- + """ + if self._source is None: + raise RuntimeError("Call set_source() first.") + + source_dict = self._source.to_dict() + coord = self._source.position.sky_coord + + if (self._nevents is not None) and (self._event_prob is not None) and self._last_convolved_source_dict == source_dict: + # Nothing has changed + return + + if (self._exposure is None) or (self._event_prob_weights is None) or coord != self._last_convolved_source_skycoord: + # Updating the location is very cost intensive. 
Only do if necessary + + # Compute nevents integral by integrating though the SC history + # This only computes the weights based on the source location. + # Once we know the source source spectrum, we can integrate over Ei + coord_vec = coord.transform_to(self._sc_ori.attitude.frame).cartesian.xyz.value + sc_coord_vec = self._sc_ori.attitude.rot[:-1].inv().apply(coord_vec) + sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) + + # For each SC timestamp, get the effective area for each energy point, store it as temporary array, + # and multiply by livetime. + # Sum up the exposure (one per energy point) without saving it to memory + # TODO: account for Earth occultation + exposure = sum([dt*np.fromiter(self._irf.effective_area_cm2([PhotonWithDirectionAndEnergyInSCFrame(c.lon.rad, c.lat.rad, e) + for e in self._energies_keV]), dtype = float) + for c,dt in zip(sc_coord_sph,self._sc_ori.livetime.to_value(u.s))]) + + self._exposure = exposure # cm2 * s + + # Get the probability for each event for the source location and each Ei + # TODO: account for livetime and Earth occultation + sc_coord_vec = self._attitude_at_event_times.rot.inv().apply(coord_vec) + sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) + self._event_prob_weights = np.fromiter(self._irf.differential_effective_area_cm2([(PhotonWithDirectionAndEnergyInSCFrame(coord.lon.rad, coord.lat.rad, energy), event) + for coord,event in zip(sc_coord_sph, self._data) \ + for energy in self._energies_keV]), + dtype = float) # cm2 / keV.rad.sr + + self._event_prob_weights = self._event_prob_weights.reshape((sc_coord_sph.size, self._energies_keV.size)) + + flux_values = self._source(self._energies_keV) #1/cm2/s/keV (3Ml default) + weight_flux_values = flux_values * self._trapz_weights #1/cm2/s + self._nevents = np.sum(self._exposure * weight_flux_values) # unit-less + + self._event_prob = 
np.sum((self._event_prob_weights * weight_flux_values[None, :]), axis=1) # 1/keV.s.rad.sr + self._event_prob /= self._nevents + + self._last_convolved_source_dict = source_dict + self._last_convolved_source_skycoord = coord.copy() + + def expected_counts(self) -> float: + """ + Total expected counts + """ + + self._update_cache() + + return self._nevents + + + def event_probability(self) -> Iterable[float]: + """ + Return the expected number of counts density from the start-th event + to the stop-th event. + + Parameters + ---------- + start : None | int + From beginning by default + stop: None|int + Until the end by default + """ + + self._update_cache() + + return self._event_prob diff --git a/cosipy/threeml/unbinned_model_folding.py b/cosipy/threeml/unbinned_model_folding.py new file mode 100644 index 00000000..8e448bb0 --- /dev/null +++ b/cosipy/threeml/unbinned_model_folding.py @@ -0,0 +1,66 @@ +import itertools +from typing import Optional, Iterable + +import numpy as np +from astromodels import Model, PointSource, ExtendedSource + +from cosipy.interfaces import UnbinnedThreeMLModelFoldingInterface, UnbinnedThreeMLSourceResponseInterface +from cosipy.interfaces.data_interface import EventDataInSCFrameInterface, EventDataInterface +from cosipy.response.threeml_response import ThreeMLModelFoldingCacheSourceResponsesMixin + + +class UnbinnedThreeMLModelFolding(UnbinnedThreeMLModelFoldingInterface, ThreeMLModelFoldingCacheSourceResponsesMixin): + + def __init__(self, + point_source_response = UnbinnedThreeMLSourceResponseInterface, + extended_source_response: UnbinnedThreeMLSourceResponseInterface = None): + + # Interface inputs + self._model = None + + # Implementation inputs + self._psr = point_source_response + self._esr = extended_source_response + + if (self._psr is not None) and (self._esr is not None) and self._psr.event_type != self._esr.event_type: + raise RuntimeError("Point and Extended Source Response must handle the same event type") + + 
self._event_type = self._psr.event_type + + # Cache + # Each source has its own cache. + # We could cache the sum of all sources, but I thought + # it was not worth it for the typical use case. Usually + # at least one source changes in between call + self._cached_model_dict = None + self._source_responses = {} + + @property + def event_type(self): + return self._event_type + + def set_model(self, model: Model): + """ + The model is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. + """ + self._model = model + + def expected_counts(self) -> float: + """ + Total expected counts + """ + + self._cache_source_responses() + + return sum(s.expected_counts() for s in self._source_responses.values()) + + def expectation_density(self) -> Iterable[float]: + """ + Sum of expectation density + """ + + self._cache_source_responses() + + return [sum(expectations) for expectations in zip(*(s.expectation_density() for s in self._source_responses.values()))] diff --git a/cosipy/threeml/util.py b/cosipy/threeml/util.py new file mode 100644 index 00000000..406a3f6b --- /dev/null +++ b/cosipy/threeml/util.py @@ -0,0 +1,34 @@ +from astromodels.core.polarization import Polarization, LinearPolarization, StokesPolarization + +def to_linear_polarization(polarization: Polarization): + # FIXME: the logic of this code block should be moved to 3ML. + # We want to see if the source is polarized, and if so, confirm + # transform to linear polarization. + # https://github.com/threeML/astromodels/blob/master/astromodels/core/polarization.py + if polarization is not None: + + if type(polarization) == Polarization: + # FIXME: Polarization is the base class, but a 3ML source + # with no polarization default to the base class. 
+            # The base class shouldn't be able to be instantiated,
+            # and we should have a NullPolarization subclass or None
+            polarization = None
+
+        elif isinstance(polarization, LinearPolarization):
+
+            if polarization.degree.value == 0:
+                polarization = None
+
+        elif isinstance(polarization, StokesPolarization):
+
+            # FIXME: Here we should convert any Stokes parameters to Linear
+            # The circular component looks like unpolarized to us.
+            # This conversion is not yet implemented in Astromodels
+            raise ValueError("Fix me. I can't handle StokesPolarization yet")
+
+        else:
+
+            if isinstance(polarization, Polarization):
+                raise TypeError(f"Fix me. I don't know how to handle this polarization type")
+            else:
+                raise TypeError(f"Polarization must be a Polarization subclass")
\ No newline at end of file
diff --git a/cosipy/ts_map/fast_ts_fit.py b/cosipy/ts_map/fast_ts_fit.py
index 9fda91f8..a11de41d 100644
--- a/cosipy/ts_map/fast_ts_fit.py
+++ b/cosipy/ts_map/fast_ts_fit.py
@@ -10,7 +10,7 @@
 import healpy as hp
 from mhealpy import HealpixBase
 
-from cosipy import SpacecraftFile
+from cosipy import SpacecraftHistory
 
 from cosipy.response import FullDetectorResponse, GalacticResponse
 from cosipy.response.functions import get_integrated_spectral_model
@@ -40,7 +40,7 @@ def __init__(self, data, bkg_model, response_path, orientation = None,
             Model used to estimate background counts in observed data.
         response_path : str or pathlib.Path
             Path to response file.
-        orientation : cosipy.SpacecraftFile, optional
+        orientation : cosipy.SpacecraftHistory, optional
             Orientation history of spacecraft; required for "local"
             cds_frame, not used if frame is "galactic"
         cds_frame : str, optional
diff --git a/cosipy/util/iterables.py b/cosipy/util/iterables.py
new file mode 100644
index 00000000..ecd62561
--- /dev/null
+++ b/cosipy/util/iterables.py
@@ -0,0 +1,16 @@
+import itertools
+
+def itertools_batched(iterable, n, *, strict=False):
+    """
+    itertools.batched was added in version 3.12. 
+ Use the "roughly equivalent" from itertools documentation for now. + """ + + # batched('ABCDEFG', 2) → AB CD EF G + if n < 1: + raise ValueError('n must be at least one') + iterator = iter(iterable) + while batch := tuple(itertools.islice(iterator, n)): + if strict and len(batch) != n: + raise ValueError('batched(): incomplete batch') + yield batch \ No newline at end of file diff --git a/docs/api/interfaces/examples/crab/background.yaml b/docs/api/interfaces/examples/crab/background.yaml new file mode 100644 index 00000000..526971dd --- /dev/null +++ b/docs/api/interfaces/examples/crab/background.yaml @@ -0,0 +1,14 @@ +#----------# +# Data I/O: + +data_file: "/path/to/background/tra/file" # full path +ori_file: "/path/to/ori/file" # full path to orientation file +unbinned_output: 'fits' # 'fits' or 'hdf5' +time_bins: 3600 # time bin size in seconds. Takes int or list of bin edges. +energy_bins: [100., 158.489, 251.189, 398.107, 630.957, 1000., 1584.89, 2511.89, 3981.07, 6309.57, 10000.] # Takes list. Needs to match response. +phi_pix_size: 5 # binning of Compton scattering angle [deg] +nside: 8 # healpix binning of psi chi local +scheme: 'ring' # healpix binning of psi chi local +tmin: 1835487300.0 # Min time cut in seconds. +tmax: 1843467255.0 # Max time cut in seconds. +#----------# diff --git a/docs/api/interfaces/examples/crab/crab.yaml b/docs/api/interfaces/examples/crab/crab.yaml new file mode 100644 index 00000000..da2a86af --- /dev/null +++ b/docs/api/interfaces/examples/crab/crab.yaml @@ -0,0 +1,14 @@ +#----------# +# Data I/O: + +data_file: "/path/to/crab/tra/file" # full path +ori_file: "/path/to/ori/file" +unbinned_output: 'fits' # 'fits' or 'hdf5' +time_bins: 3600 # time bin size in seconds. Takes int or list of bin edges. +energy_bins: [100., 158.489, 251.189, 398.107, 630.957, 1000., 1584.89, 2511.89, 3981.07, 6309.57, 10000.] # Takes list. Needs to match response. 
+phi_pix_size: 5 # binning of Compton scattering angle [deg] +nside: 8 # healpix binning of psi chi local +scheme: 'ring' # healpix binning of psi chi local +tmin: 1835487300.0 # Min time cut in seconds. +tmax: 1843467255.0 # Max time cut in seconds. +#----------# diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py new file mode 100644 index 00000000..a25d823f --- /dev/null +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -0,0 +1,458 @@ +#!/usr/bin/env python +# coding: utf-8 + +# # Spectral fitting example (Crab) + +# **To run this, you need the following files, which can be downloaded using the first few cells of this notebook:** +# - orientation file (20280301_3_month_with_orbital_info.ori) +# - binned data (crab_bkg_binned_data.hdf5, crab_binned_data.hdf5, & bkg_binned_data.hdf5) +# - detector response (SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5) +# +# **The binned data are simulations of the Crab Nebula and albedo photon background produced using the COSI SMEX mass model. The detector response needs to be unzipped before running the notebook.** + +# This notebook fits the spectrum of a Crab simulated using MEGAlib and combined with background. +# +# [3ML](https://threeml.readthedocs.io/) is a high-level interface that allows multiple datasets from different instruments to be used coherently to fit the parameters of source model. A source model typically consists of a list of sources with parametrized spectral shapes, sky locations and, for extended sources, shape. Polarization is also possible. A "coherent" analysis, in this context, means that the source model parameters are fitted using all available datasets simultanously, rather than performing individual fits and finding a well-suited common model a posteriori. 
+# +# In order for a dataset to be included in 3ML, each instrument needs to provide a "plugin". Each plugin is responsible for reading the data, convolving the source model (provided by 3ML) with the instrument response, and returning a likelihood. In our case, we'll compute a binned Poisson likelihood: +# +# $$ +# \log \mathcal{L}(\mathbf{x}) = \sum_i \log \frac{\lambda_i(\mathbf{x})^{d_i} \exp (-\lambda_i)}{d_i!} +# $$ +# +# where $d_i$ are the counts on each bin and $\lambda_i$ are the expected counts given a source model with parameters $\mathbf{x}$. +# +# In this example, we will fit a single point source with a known location. We'll assume the background is known and fixed up to a scaling factor. Finally, we will fit a Band function: +# +# $$ +# f(x) = K \begin{cases} \left(\frac{x}{E_{piv}}\right)^{\alpha} \exp \left(-\frac{(2+\alpha) +# * x}{x_{p}}\right) & x \leq (\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \\ \left(\frac{x}{E_{piv}}\right)^{\beta} +# * \exp (\beta-\alpha)\left[\frac{(\alpha-\beta) x_{p}}{E_{piv}(2+\alpha)}\right]^{\alpha-\beta} +# * &x>(\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \end{cases} +# $$ +# +# where $K$ (normalization), $\alpha$ & $\beta$ (spectral indeces), and $x_p$ (peak energy) are the free parameters, while $E_{piv}$ is the pivot energy which is fixed (and arbitrary). +# +# Considering these assumptions: +# +# $$ +# \lambda_i(\mathbf{x}) = B*b_i + s_i(\mathbf{x}) +# $$ +# +# where $B*b_i$ are the estimated counts due to background in each bin with $B$ the amplitude and $b_i$ the shape of the background, and $s_i$ are the corresponding expected counts from the source, the goal is then to find the values of $\mathbf{x} = [K, \alpha, \beta, x_p]$ and $B$ that maximize $\mathcal{L}$. These are the best estimations of the parameters. +# +# The final module needs to also fit the time-dependent background, handle multiple point-like and extended sources, as well as all the spectral models supported by 3ML. 
Eventually, it will also fit the polarization angle. However, this simple example already contains all the necessary pieces to do a fit. + +# In[1]: + + +from cosipy import test_data, BinnedData +from cosipy.spacecraftfile import SpacecraftHistory +from cosipy.response.FullDetectorResponse import FullDetectorResponse +from cosipy.util import fetch_wasabi_file + +from cosipy.statistics import PoissonLikelihood +from cosipy.background_estimation import FreeNormBinnedBackground +from cosipy.interfaces import ThreeMLPluginInterface +from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse +from cosipy.data_io import EmCDSBinnedData + +import sys + +from scoords import SpacecraftFrame + +from astropy.time import Time +import astropy.units as u +from astropy.coordinates import SkyCoord, Galactic + +import numpy as np +import matplotlib.pyplot as plt + +from threeML import Band, PointSource, Model, JointLikelihood, DataList +from astromodels import Parameter, Powerlaw + +from pathlib import Path + +import os + + +def main(): + + single_bkg_fit = True + + # ## Download and read in binned data + + # Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into + + data_path = Path("") # /path/to/files. 
Current dir by default + + + # Download the orientation file + + + # In[ ]: + + sc_orientation_path = data_path / "DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori" + fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori', + output=sc_orientation_path, checksum = 'e5e71e3528e39b855b0e4f74a1a2eebe') + + # Download the binned Crab data + + # In[7]: + + crab_data_path = data_path / "crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5" + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5', + output=crab_data_path, checksum = '405862396dea2be79d7892d6d5bb50d8') + + bkg_components = {"PrimaryProtons":{'filename':'PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum':'7597f04210e59340a0888c66fc5cbc63'}, + "PrimaryAlphas": {'filename': 'PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum': '76a68da730622851b8e1c749248c3b40'}, + "AlbedoPhotons": {'filename': 'AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum': '76c58361d2c9b43b66ef2e41c18939c4'}, + "AlbedoNeutrons": {'filename': 'AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum': '8f3cb418c637b839665a4fcbd000d2eb'}, + "CosmicPhotons": {'filename': 'CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5', 'checksum': '93c4619b383572d318328e6380e35a70'}, + "CosmicDiffuse": {'filename': 'GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5', 'checksum': 'd0415d4d04b040af47f23f5d08cb7d64'}, + "SecondaryPositrons": {'filename': 'SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5', 'checksum': '5fec2212dcdbb4c43c3ac02f02524f68'}, + "SecondaryProtons": {'filename': 'SecondaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.fits.gz.hdf5', 'checksum': '78aefa46707c98563294a898a62845c1'}, + "SAAprotons": {'filename': 
'SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5', 'checksum': 'fc69fbbfd94cd595f57a8b11fc721169'}, + } + + # Download the binned background data + for bkg in bkg_components.values(): + wasabi_path = 'COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/'+bkg['filename'] + fetch_wasabi_file(wasabi_path, output=data_path/bkg['filename'], checksum = bkg['checksum']) + + # Download the response file + dr_path = data_path / "ResponseContinuum.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5" + fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/ResponseContinuum.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5', + output=str(dr_path), checksum = '7121f094be50e7bfe9b31e53015b0e85') + + + # Read in the spacecraft orientation file + + # In[4]: + + + sc_orientation = SpacecraftHistory.open(sc_orientation_path) + + + # Create BinnedData objects for the Crab only, Crab+background, and background only. 
The Crab only simulation is not used for the spectral fit, but can be used to compare the fitted spectrum to the source simulation + + # In[5]: + + + crab = BinnedData(data_path / "crab.yaml") + crab.load_binned_data_from_hdf5(binned_data=crab_data_path) + + for bkg in bkg_components.values(): + binned_data = BinnedData(data_path / "background.yaml") + binned_data.load_binned_data_from_hdf5(binned_data=data_path/bkg['filename']) + bkg['dist'] = binned_data.binned_data.project('Em', 'Phi', 'PsiChi') + + # Load binned .hdf5 files + + # In[6]: + + + # Define the path to the detector response + # ## Perform spectral fit + + # ============ Interfaces ============== + + dr = FullDetectorResponse.open(dr_path) + instrument_response = BinnedInstrumentResponse(dr) + + # Set background parameter, which is used to fit the amplitude of the background, and instantiate the COSI 3ML plugin + + # In[8]: + total_bkg = None + for bkg in bkg_components.values(): + if total_bkg is None: + total_bkg = bkg['dist'] + else: + total_bkg = total_bkg + bkg['dist'] # Issues with in-place operations for sparse contents + + if single_bkg_fit: + bkg_dist = {"total_bkg":total_bkg} + else: + bkg_dist = {l: b['dist'] for l, b in bkg_components.items()} + + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. + # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and + # it was added to the expectation, not the normalized bkg + for bckfile in bkg_dist.keys() : + bkg_dist[bckfile] += sys.float_info.min + + #combine the data + the bck like we would get for real data + data = EmCDSBinnedData(crab.binned_data.project('Em', 'Phi', 'PsiChi') + total_bkg) + bkg = FreeNormBinnedBackground(bkg_dist, + sc_history=sc_orientation, + copy = False) + + # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, + # matching the behavior of v0.3. 
This is all the current BinnedInstrumentResponse can do. + # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation + # can provide the response for an arbitrary directions, Ei and Pol values. + psr = BinnedThreeMLPointSourceResponse(data = data, + instrument_response = instrument_response, + sc_history=sc_orientation, + energy_axis = dr.axes['Ei'], + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, + nside = 2*data.axes['PsiChi'].nside) + + ##==== + + + response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) + + like_fun = PoissonLikelihood(data, response, bkg) + + cosi = ThreeMLPluginInterface('cosi', + like_fun, + response, + bkg) + + # Nuisance parameter guess, bounds, etc. + for bkg_label in bkg_dist.keys(): + cosi.bkg_parameter[bkg_label] = Parameter(bkg_label, # background parameter + 1, # initial value of parameter + min_value=0, # minimum value of parameter + max_value= 100 if single_bkg_fit else 20, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + unit = u.Hz + ) + + # ======== Interfaces end ========== + + # Define a point source at the known location with a Band function spectrum and add it to the model. The initial values of the Band function parameters are set to the true values used to simulate the source + + + # In[9]: + + l = 184.56 + b = -5.78 + + alpha = -1.99 + beta = -2.32 + E0 = 531. * (alpha - beta) * u.keV + xp = E0 * (alpha + 2) / (alpha - beta) + piv = 500. 
* u.keV + K = 3.07e-5 / u.cm / u.cm / u.s / u.keV + + spectrum = Band() + + spectrum.alpha.min_value = -2.14 + spectrum.alpha.max_value = 3.0 + spectrum.beta.min_value = -5.0 + spectrum.beta.max_value = -2.15 + spectrum.xp.min_value = 1.0 + spectrum.K.min_value = 1e-10 + + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value + + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + spectrum.alpha.delta = 0.01 + spectrum.beta.delta = 0.01 + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model + + model = Model( + source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) + + # Optional: if you want to call get_log_like manually, then you also need to set the model manually + # 3ML does this internally during the fit though + cosi.set_model(model) + + + # Gather all plugins and combine with the model in a JointLikelihood object, then perform maximum likelihood fit + + # In[10]: + + + plugins = DataList(cosi) # If we had multiple instruments, we would do e.g. DataList(cosi, lat, hawc, ...) + + like = JointLikelihood(model, plugins, verbose = False) + + like.fit() + + + # ## Error propagation and plotting (Band function) + + # Define Band function spectrum injected into MEGAlib + + # In[11]: + + ## Injected + + l = 184.56 + b = -5.78 + + alpha_inj = -1.99 + beta_inj = -2.32 + E0_inj = 531. * (alpha_inj - beta_inj) * u.keV + xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) + piv_inj = 100. 
* u.keV + K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV + + spectrum_inj = Band() + + spectrum_inj.alpha.min_value = -2.14 + spectrum_inj.alpha.max_value = 3.0 + spectrum_inj.beta.min_value = -5.0 + spectrum_inj.beta.max_value = -2.15 + spectrum_inj.xp.min_value = 1.0 + spectrum_inj.K.min_value = 1e-10 + + spectrum_inj.alpha.value = alpha_inj + spectrum_inj.beta.value = beta_inj + spectrum_inj.xp.value = xp_inj.value + spectrum_inj.K.value = K_inj.value + spectrum_inj.piv.value = piv_inj.value + + spectrum_inj.xp.unit = xp_inj.unit + spectrum_inj.K.unit = K_inj.unit + spectrum_inj.piv.unit = piv_inj.unit + + # Expectation for injected source + source_inj = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum_inj) # Spectral model + + psr.set_source(source_inj) + expectation_inj = psr.expectation(copy=True) + + + # The summary of the results above tell you the optimal values of the parameters, as well as the errors. 
Propogate the errors to the "evaluate_at" method of the spectrum + + # In[12]: + + + results = like.results + + + print(results.display()) + + parameters = {par.name:results.get_variates(par.path) + for par in results.optimized_model["source"].parameters.values() + if par.free} + + results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) + + print(results.optimized_model["source"]) + + # Evaluate the flux and errors at a range of energies for the fitted and injected spectra, and the simulated source flux + + # In[13]: + + + energy = np.geomspace(100*u.keV,10*u.MeV).to_value(u.keV) + + flux_lo = np.zeros_like(energy) + flux_median = np.zeros_like(energy) + flux_hi = np.zeros_like(energy) + flux_inj = np.zeros_like(energy) + + for i, e in enumerate(energy): + flux = results_err(e) + flux_median[i] = flux.median + flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) + flux_inj[i] = spectrum_inj.evaluate_at(e) + + binned_energy_edges = crab.binned_data.axes['Em'].edges.value + binned_energy = np.array([]) + bin_sizes = np.array([]) + + for i in range(len(binned_energy_edges)-1): + binned_energy = np.append(binned_energy, (binned_energy_edges[i+1] + binned_energy_edges[i]) / 2) + bin_sizes = np.append(bin_sizes, binned_energy_edges[i+1] - binned_energy_edges[i]) + + expectation = response.expectation(data, copy = True) + + + # Plot the fitted and injected spectra + + # In[14]: + + + fig,ax = plt.subplots() + + ax.plot(energy, energy*energy*flux_median, label = "Best fit") + ax.fill_between(energy, energy*energy*flux_lo, energy*energy*flux_hi, alpha = .5, label = "Best fit (errors)") + ax.plot(energy, energy*energy*flux_inj, color = 'black', ls = ":", label = "Injected") + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") + + ax.legend() + + ax.set_ylim(.1,100) + + #plt.show() + + # Plot the fitted spectrum convolved 
with the response, as well as the simulated source counts + + # In[15]: + + + fig,ax = plt.subplots() + + ax.stairs(expectation.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response") + ax.stairs(expectation_inj.project('Em').todense().contents, binned_energy_edges, color='blue', label = "Injected spectrum convolved with response") + ax.errorbar(binned_energy, expectation.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) + ax.stairs(crab.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Source counts") + ax.errorbar(binned_energy, crab.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel("Counts") + + ax.legend() + + #plt.show() + + + # Plot the fitted spectrum convolved with the response plus the fitted background, as well as the simulated source+background counts + + # In[16]: + + expectation_bkg = bkg.expectation(data.axes, copy = True) + + fig,ax = plt.subplots() + + ax.stairs(expectation.project('Em').todense().contents + expectation_bkg.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response plus background") + ax.errorbar(binned_energy, expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) + ax.stairs(data.data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Total counts") + ax.errorbar(binned_energy, data.data.project('Em').todense().contents, 
yerr=np.sqrt(data.data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel("Counts") + + ax.legend() + + plt.show() + + +if __name__ == "__main__": + + import cProfile + cProfile.run('main()', filename = "prof_interfaces.prof") + exit() + + main() diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py new file mode 100644 index 00000000..77e66f3e --- /dev/null +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -0,0 +1,391 @@ +#!/usr/bin/env python +# coding: utf-8 + +import logging + +from astropy.utils.metadata.utils import dtype +from histpy import Histogram, HealpixAxis +from mhealpy import HealpixMap + +from cosipy.background_estimation.free_norm_threeml_binned_bkg import FreeNormBackgroundInterpolatedDensityTimeTagEmCDS +from cosipy.interfaces.expectation_interface import SumExpectationDensity +from cosipy.threeml.unbinned_model_folding import UnbinnedThreeMLModelFolding + +logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', + level=logging.INFO) +logger = logging.getLogger(__name__) + +import cProfile + +from cosipy import test_data, BinnedData, UnBinnedData +from cosipy.data_io.EmCDSUnbinnedData import TimeTagEmCDSEventDataInSCFrameFromArrays, \ + TimeTagEmCDSEventDataInSCFrameFromDC3Fits, TimeTagEmCDSEventInSCFrame +from cosipy.event_selection.time_selection import TimeSelector +from cosipy.interfaces.photon_parameters import PhotonWithDirectionAndEnergyInSCFrameInterface +from cosipy.response.instrument_response_function import UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction +from cosipy.response.photon_types import PhotonWithDirectionAndEnergyInSCFrame +from cosipy.spacecraftfile import SpacecraftHistory +from 
cosipy.response.FullDetectorResponse import FullDetectorResponse +from cosipy.threeml.psr_fixed_ei import UnbinnedThreeMLPointSourceResponseTrapz +from cosipy.util import fetch_wasabi_file + +from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood +from cosipy.background_estimation import FreeNormBinnedBackground +from cosipy.interfaces import ThreeMLPluginInterface +from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse + +import sys + +from scoords import SpacecraftFrame + +from astropy.time import Time +import astropy.units as u +from astropy.coordinates import SkyCoord, Galactic, Angle, UnitSphericalRepresentation, CartesianRepresentation, \ + angular_separation + +import numpy as np +import matplotlib.pyplot as plt + +from threeML import Band, PointSource, Model, JointLikelihood, DataList +from astromodels import Parameter, Powerlaw + +from pathlib import Path + +import os + +def main(): + + use_bkg = True + + profile = cProfile.Profile() + + # Download all data + data_path = Path("") # /path/to/files. 
Current dir by default + + crab_data_path = data_path / "crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz" + fetch_wasabi_file('COSI-SMEX/DC3/Data/Sources/crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz', + output=str(crab_data_path), checksum='1d73e7b9e46e51215738075e91a52632') + + bkg_data_path = data_path / "AlbedoPhotons_3months_unbinned_data_filtered_with_SAAcut.fits.gz" + fetch_wasabi_file('COSI-SMEX/DC3/Data/Backgrounds/Ge/AlbedoPhotons_3months_unbinned_data_filtered_with_SAAcut.fits.gz', + output=str(bkg_data_path), checksum='191a451ee597fd2e4b1cf237fc72e6e2') + + dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5" # path to detector response + fetch_wasabi_file( + 'COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', + output=str(dr_path), + checksum='eb72400a1279325e9404110f909c7785') + + sc_orientation_path = data_path / "DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori" + fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori', + output=str(sc_orientation_path), checksum='b87fd41b6c28a5c0c51448ce2964e57c') + + binned_bkg_data_path = data_path / "bkg_binned_data.hdf5" + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', + output=str(binned_bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') + + # orientation history + # About 1 full orbit ~1.7 hr + tstart = Time("2028-03-01 02:00:00.117") + tstop = Time("2028-03-01 03:42:00.117") + sc_orientation = SpacecraftHistory.open(sc_orientation_path) + sc_orientation = sc_orientation.select_interval(tstart, tstop) + + # Prepare instrument response function + logger.info("Loading response....") + dr = FullDetectorResponse.open(dr_path) + irf = UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction(dr) + logger.info("Loading response 
DONE") + + # Prepare data + selector = TimeSelector(tstart = sc_orientation.tstart, tstop = sc_orientation.tstop) + + logger.info("Loading data...") + if use_bkg: + data_file = [crab_data_path, bkg_data_path] + else: + data_file = crab_data_path + + data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(data_file, + selection=selector) + + logger.info("Loading data DONE") + + # Set background + + if use_bkg: + bkg = BinnedData(data_path / "background.yaml") + bkg.load_binned_data_from_hdf5(binned_data=str(binned_bkg_data_path)) + bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') + + # Workaround to avoid inf values. Our bkg should be smooth, but currently it's not. + bkg_dist += sys.float_info.min + + logger.info("Setting bkg...") + bkg = FreeNormBackgroundInterpolatedDensityTimeTagEmCDS(data, bkg_dist, sc_orientation, copy = False) + bkg.set_norm(5*u.Hz) + logger.info("Setting bkg DONE") + else: + bkg = None + + # Prepare point source response, which convolved the IRF with the SC orientation + ei_samples = np.geomspace(100, 5000, 100)*u.keV + psr = UnbinnedThreeMLPointSourceResponseTrapz(data, irf, sc_orientation, + ei_samples) + + # Prepare the model + l = 184.56 + b = -5.78 + + index = -2.26 + piv = 1 * u.MeV + K = 3e-6 / u.cm / u.cm / u.s / u.keV + + spectrum = Powerlaw() + + spectrum.index.min_value = -3 + spectrum.index.max_value = -1 + + # Fix it for testing purposes + spectrum.index.free = True + + spectrum.K.value = K.value + spectrum.piv.value = piv.value + + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + spectrum.index.delta = 0.01 + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model + + model = Model( + source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) 
+ + + # Set model folding + response = UnbinnedThreeMLModelFolding(psr) + + # response.set_model(model) # optional. Will be called by likelihood + # print(response.ncounts()) + # print(np.fromiter(response.expectation_density(), dtype = float)) + + # Setup likelihood + if use_bkg: + expectation_density = SumExpectationDensity(response, bkg) + else: + expectation_density = response + + # Test plots. REMOVE + # response.set_model(model) + # exdenlist = np.fromiter(expectation_density.expectation_density(), dtype=float) + + # plot expectation density energy + # energy = np.fromiter([e.energy_keV for e in data], dtype = float) + # fig,ax = plt.subplots() + # ax.scatter(energy, exdenlist) + # ax.set_xscale('log') + # ax.set_yscale('log') + # h = Histogram(np.geomspace(50,5000)) + # h.fill(energy) + # h /= h.axis.widths + # h *= np.max(exdenlist) / np.max(h) + # h.plot(ax) + # plt.show() + + # plot expectation density phi + # phi = np.fromiter([e.scattering_angle_rad for e in data], dtype = float) + # phi *= 180/3.1416 + # fig,ax = plt.subplots() + # ax.scatter(phi, exdenlist) + # h = Histogram(np.linspace(0,180)) + # h.fill(phi) + # h /= h.axis.widths + # h *= np.max(exdenlist) / np.max(h) + # h.plot(ax) + # plt.show() + + # Plot ARM + # attitudes = sc_orientation.interp_attitude(data.time) + + # psichi_sc = data.scattered_direction_sc.represent_as(UnitSphericalRepresentation) + # coord_vec = source.position.sky_coord.transform_to(sc_orientation.attitude.frame).cartesian.xyz.value + # sc_coord_vec = attitudes.rot.inv().apply(coord_vec) + # sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) + # arm = angular_separation(sc_coord_sph.lon, sc_coord_sph.lat, psichi_sc.lon, psichi_sc.lat).to_value(u.deg) - phi + # + + # psichi_sc = data.scattered_direction_sc.represent_as(UnitSphericalRepresentation) + # psichi_sc_vec = psichi_sc.to_cartesian().xyz.value + # psichi_gal_vec = 
attitudes.rot.apply(psichi_sc_vec.transpose()) + # psichi_coord = SkyCoord(CartesianRepresentation(*psichi_gal_vec.transpose()), frame = attitudes.frame) + # arm = source.position.sky_coord.separation(psichi_coord).to_value(u.deg) - phi + # + # h = Histogram(np.linspace(-90,90,360)) + # + # fig,ax = plt.subplots() + # ax.scatter(arm, exdenlist) + # + # h.fill(arm) + # + # h_ex = Histogram(h.axis) + # h_ex.fill(arm, weight=exdenlist) + # h_ex /= h # Mean + # + # h /= h.axis.widths + # h *= np.nanmax(h_ex) / np.max(h) # Normalize + # + # h.plot(ax, color = 'green') + # h_ex.plot(ax, color='red') + # + # plt.show() + + # Plot CDS + # fig = plt.figure() + # ax = fig.add_subplot(1, 1, 1, projection='mollview') + # + # sc = ax.scatter(psichi_coord.l.deg, psichi_coord.b.deg, transform=ax.get_transform('world'), + # c = phi , + # cmap='inferno', + # s=2, vmin=0, vmax=180) + # + # ax.scatter(source.position.sky_coord.l.deg, source.position.sky_coord.b.deg, transform=ax.get_transform('world'), marker='x', s=100, c='red') + # + # fig.colorbar(sc, fraction=.02, label="$\phi$ [deg]") + # + # m = HealpixMap(nside=128, coordsys='galactic') + # m[:] = source.position.sky_coord.separation(m.pix2skycoord(np.arange(m.npix))).to_value(u.deg) + # img = m.get_wcs_img(ax, coord='C') #Use C for a "bug" in healpy (doesn't work the same as plot() + # ax.contour(img, levels=np.arange(0, 180, 10), cmap='inferno', + # vmin=0, vmax=180) + # plt.show() + + + + like_fun = UnbinnedLikelihood(expectation_density) + + cosi = ThreeMLPluginInterface('cosi', like_fun, response, bkg) + + # Nuisance parameter guess, bounds, etc. + if use_bkg: + cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter + 2.5, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=100, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + + plugins = DataList(cosi) # If we had multiple instruments, we would do e.g. 
DataList(cosi, lat, hawc, ...) + + like = JointLikelihood(model, plugins, verbose = False) + + # Run + print(data.nevents, expectation_density.expected_counts()) + profile.enable() + like.fit() + profile.disable() + profile.dump_stats("prof_interfaces.prof") + + results = like.results + + # Plot the fitted and injected spectra + + # In[14]: + + + fig, ax = plt.subplots() + + alpha_inj = -1.99 + beta_inj = -2.32 + E0_inj = 531. * (alpha_inj - beta_inj) * u.keV + xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) + piv_inj = 100. * u.keV + K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV + + spectrum_inj = Band() + + spectrum_inj.alpha.min_value = -2.14 + spectrum_inj.alpha.max_value = 3.0 + spectrum_inj.beta.min_value = -5.0 + spectrum_inj.beta.max_value = -2.15 + spectrum_inj.xp.min_value = 1.0 + + spectrum_inj.alpha.value = alpha_inj + spectrum_inj.beta.value = beta_inj + spectrum_inj.xp.value = xp_inj.value + spectrum_inj.K.value = K_inj.value + spectrum_inj.piv.value = piv_inj.value + + spectrum_inj.xp.unit = xp_inj.unit + spectrum_inj.K.unit = K_inj.unit + spectrum_inj.piv.unit = piv_inj.unit + + energy = np.geomspace(100 * u.keV, 10 * u.MeV).to_value(u.keV) + + flux_lo = np.zeros_like(energy) + flux_median = np.zeros_like(energy) + flux_hi = np.zeros_like(energy) + flux_inj = np.zeros_like(energy) + + parameters = {par.name: results.get_variates(par.path) + for par in results.optimized_model["source"].parameters.values() + if par.free} + + results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) + + for i, e in enumerate(energy): + flux = results_err(e) + flux_median[i] = flux.median + flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) + flux_inj[i] = spectrum_inj.evaluate_at(e) + + ax.plot(energy, energy * energy * flux_median, label="Best fit") + ax.fill_between(energy, energy * energy * flux_lo, energy * energy * flux_hi, alpha=.5, label="Best fit (errors)") + ax.plot(energy, energy * energy 
* flux_inj, color='black', ls=":", label="Injected") + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") + + ax.legend() + + ax.set_ylim(.1,100) + + plt.show() + + # Grid + if use_bkg: + loglike = Histogram([np.geomspace(5e-6, 15e-6, 30), + np.geomspace(4, 5, 31)], labels=['K', 'B'], axis_scale='log') + + for i, k in enumerate(loglike.axes['K'].centers): + for j, b in enumerate(loglike.axes['B'].centers): + spectrum.K.value = k + cosi.bkg_parameter['bkg_norm'].value = b + + loglike[i, j] = cosi.get_log_like() + + else: + loglike = Histogram([np.geomspace(2e-6, 2e-4, 30)], labels=['K'], axis_scale='log') + + for i, k in enumerate(loglike.axes['K'].centers): + spectrum.K.value = k + + loglike[i] = cosi.get_log_like() + + ax, plot = loglike.plot(vmin = np.max(loglike) - 25, vmax = np.max(loglike)) + + plt.show() + + return + + +if __name__ == "__main__": + + main() \ No newline at end of file diff --git a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py new file mode 100755 index 00000000..5ead0656 --- /dev/null +++ b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py @@ -0,0 +1,163 @@ +import logging + +from cosipy.util import fetch_wasabi_file + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s' + ) + +import sys + +from mhealpy import HealpixBase + +from matplotlib import pyplot as plt + +from cosipy.statistics import PoissonLikelihood + +from cosipy.background_estimation import FreeNormBinnedBackground +from cosipy.interfaces import ThreeMLPluginInterface +from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse + +from cosipy import BinnedData +from cosipy.spacecraftfile import SpacecraftHistory +from 
cosipy.response.FullDetectorResponse import FullDetectorResponse + +from astropy.time import Time +import astropy.units as u + +import numpy as np + +from threeML import Band, PointSource, Model, JointLikelihood, DataList +from astromodels import Parameter + +from pathlib import Path + +import os + +def main(): + + # Download data + data_path = Path("") # /path/to/files. Current dir by default + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_bkg_binned_data.hdf5', output=str(data_path / 'grb_bkg_binned_data.hdf5'), checksum = 'fce391a4b45624b25552c7d111945f60') + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5', output=str(data_path / 'grb_binned_data.hdf5'), checksum = 'fcf7022369b6fb378d67b780fc4b5db8') + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/bkg_binned_data_1s_local.hdf5', output=str(data_path / 'bkg_binned_data_1s_local.hdf5'), checksum = 'b842a7444e6fc1a5dd567b395c36ae7f') + # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') + # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + + dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5" # path to detector response + fetch_wasabi_file( + 'COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', + output=str(dr_path), + checksum='eb72400a1279325e9404110f909c7785') + + # Set model to fit + l = 93. + b = -53. 
+ + alpha = -1 + beta = -3 + xp = 450. * u.keV + piv = 500. * u.keV + K = 1 / u.cm / u.cm / u.s / u.keV + + spectrum = Band() + spectrum.beta.min_value = -15.0 + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model + + # Date preparation + binned_data = BinnedData(data_path / "grb.yaml") + binned_data.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") + + bkg = BinnedData(data_path / "background.yaml") + + bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") + + bkg_tmin = 1842597310.0 + bkg_tmax = 1842597550.0 + bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] + bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] + bkg_dist = bkg.binned_data.slice[{'Time': slice(bkg_min, bkg_max)}].project('Em', 'Phi', 'PsiChi') + + tmin = Time(1842597410.0, format='unix') + tmax = Time(1842597450.0, format='unix') + ori = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) + ori = ori.select_interval(tmin, tmax) # Function changed name during refactoring + + # Prepare instrument response + dr = FullDetectorResponse.open(dr_path) + + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. + # Reproduces results before refactoring. 
It's not _exactly_ the same, since this fudge value was 1e-12, and + # it was added to the expectation, not the normalized bkg + bkg_dist += sys.float_info.min + + # ============ Interfaces ============== + data = binned_data.get_em_cds() + + bkg = FreeNormBinnedBackground(bkg_dist, + sc_history=ori, + copy = False) + + instrument_response = BinnedInstrumentResponse(dr) + + # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, + # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. + # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation + # can provide the response for an arbitrary directions, Ei and Pol values. + # NOTE: this is currently only implemented for data in local coords + psr = BinnedThreeMLPointSourceResponse(data = data, + instrument_response = instrument_response, + sc_history=ori, + energy_axis = dr.axes['Ei'], + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, + nside = 2*data.axes['PsiChi'].nside) + + response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) + + like_fun = PoissonLikelihood(data, response, bkg) + + cosi = ThreeMLPluginInterface('cosi', + like_fun, + response, + bkg) + + # Nuisance parameter guess, bounds, etc. + cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter + 1, + unit = u.Hz,# initial value of parameter + min_value=0, # minimum value of parameter + max_value=5, # maximum value of parameter + delta=1e-3, # initial step used by fitting engine + ) + + # ======== Interfaces end ========== + + # 3Ml fit. Same as before + plugins = DataList(cosi) + model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) 
+ like = JointLikelihood(model, plugins) + like.fit() + results = like.results + print(results.display()) + + +if __name__ == "__main__": + + import cProfile + cProfile.run('main()', filename = "prof.prof") + exit() + + main() \ No newline at end of file diff --git a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py new file mode 100644 index 00000000..5e29b331 --- /dev/null +++ b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py @@ -0,0 +1,287 @@ +import logging +logging.basicConfig( + level=logging.INFO, # Set the logging level to INFO + format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s' +) +logger = logging.getLogger(__name__) + +import cProfile +import time + +import numpy as np +from astropy.coordinates import SkyCoord, Angle + +from astropy import units as u +from cosipy.polarization import StereographicConvention +from cosipy.response.ideal_response import IdealComptonIRF, UnpolarizedIdealComptonIRF, ExpectationFromLineInSCFrame, RandomEventDataFromLineInSCFrame +from cosipy.response.relative_coordinates import RelativeCDSCoordinates +from cosipy.statistics import UnbinnedLikelihood +from histpy import Histogram, Axis, HealpixAxis +from matplotlib import pyplot as plt +from scoords import SpacecraftFrame + +from mhealpy import HealpixMap +from tqdm import tqdm + +#plt.ion() + +# ==== Initial setup ==== +# Simulated source parameters +source_energy = 500 * u.keV +source_direction = SkyCoord(lon = 0, lat = 60, unit = 'deg', frame = SpacecraftFrame()) +source_flux = 1 / u.cm / u.cm / u.s +source_pd = .7 +source_pa = 80 * u.deg +pol_convention = StereographicConvention() + +# The integrated time of the observation. 
Increase it to get more statistics +# which is good for visualizing the data, but that will maka the analysis slower +duration = 10*u.s + +# Instrument Response Function (IRF) definitions +# The "unpolarized" response returns an average over all +# polarization angles. +irf_pol = IdealComptonIRF.cosi_like() +irf_unpol = UnpolarizedIdealComptonIRF.cosi_like() + +# Simulate data sampling from the IRF itself +# This simulated a monochromatic source at a fixed direction in the SC coordinate frame + +# profile = cProfile.Profile() +# profile.enable() +# tstart = time.perf_counter() +logger.info("Simulating data...") +data = RandomEventDataFromLineInSCFrame(irf = irf_unpol, + flux = source_flux, + duration = duration, + energy=source_energy, + direction = source_direction, + polarized_irf= irf_pol, + polarization_degree=source_pd, + polarization_angle=source_pa, + polarization_convention=pol_convention) + +# Get the measured energy (Em) and the Compton Data Space (CDS) (CDS = Phi and PsiChi) +measured_energy = data.energy +phi = data.scattering_angle +psichi = data.scattered_direction_sc + +logger.info(f"Got {data.nevents} events.") + +# ======= Data visualization ====== + +fig,ax = plt.subplots(2, 3, figsize = [18,8]) + +# This is a visualization of the Compton cone. Instead of drawing it in 3D space, +# we'll use color to represent the scattering angle Phi, which is usually the z-axis a Compton cone plot. 
+# The location of the source is marked by an X, and the direction of each scattered photon (PsiChi) is represented +# with a dot +ax[0,0].set_axis_off() # Replace corner plot with axis suitable for spherical data +sph_ax = fig.add_subplot(2,3,1, projection = 'mollview') + +sc = sph_ax.scatter(psichi.lon.deg, psichi.lat.deg, transform = sph_ax.get_transform('world'), c = phi.to_value('deg'), cmap = 'inferno', + s = 2, vmin = 0, vmax = 180) +sph_ax.scatter(source_direction.lon.deg, source_direction.lat.deg, transform = sph_ax.get_transform('world'), marker = 'x', s = 100, c = 'red') +fig.colorbar(sc, orientation="horizontal", fraction = .05, label = "phi [deg]") + +sph_ax.set_title("Compton Data Space") + +# While the data live in this complex 4-D space (Em + CDS) it is useful to make some cuts and projections +# to visualize it. For this we use the following coordinates, which are relative to hypothetical source +# (or, in this case, a known source, since we simulated it) +# Epsilon: fractional difference in energy which respect to the energy of the source +# Phi_geometric: angular distance between the source location and PsiChi +# Theta_ARM = the difference between Phi (which is computed exclusively from kinematics) and Phi_geometric +# Zeta: the azimuthal scattering direction, computed from PsiChi once a particular source direction is assumed. +# The zeta=0 direction is arbitrary, and is defined by the polarization convention. 
+eps = ((measured_energy - source_energy) / source_energy).to_value('') +phi_geom,zeta = RelativeCDSCoordinates(source_direction, pol_convention).to_relative(psichi) +theta_arm = phi_geom - phi + +rel_binned_data = Histogram([Axis(np.linspace(-1,1.1,200), scale = 'linear', label='eps'), + Axis(np.linspace(0, 180, 180)*u.deg, scale='linear', label='phi'), + Axis(np.linspace(-180, 180, 180)*u.deg, scale='linear', label='arm'), + Axis(np.linspace(-180, 180, 180) * u.deg, scale='linear', label='az')]) + +rel_binned_data.fill(eps, phi, theta_arm, zeta) + +rel_binned_data.slice[{'phi':slice(30,120)}].project('az').rebin(5).plot(ax[1,0],errorbars = True) +ax[1,0].set_title("Azimuthal Scattering Angle Distribution (ASAD)") + +rel_binned_data.project(['arm','phi']).rebin(3,5).plot(ax[0,1]) +ax[0,1].set_title('Compton cone "wall"') + +rel_binned_data.project('phi').rebin(5).plot(ax[0,2],errorbars = True) +ax[0,2].set_title("Polar Scattering Angle") + +rel_binned_data.project('arm').rebin(3).plot(ax[1,1],errorbars = True) +ax[1,1].set_title("Angular Resolution Measure (ARM)") + +rel_binned_data.project('eps').plot(ax[1,2],errorbars = True) +ax[1,2].set_title("Energy dispersion") + +fig.subplots_adjust(left=.05, right=.95, top=.95, bottom=.1, wspace=0.2, hspace=0.4) + +plt.show() + +# ===== Likelihood setup ===== + +# In order to compute the likelihood we need to know how many counts we expect and, +# if they are detected, what is the probability density of having obtained a +# specific Em+CDS set of parameters. All of this is computed from the IRF's +# effective area and event probability density functions (PDFs). +# Since we used exactly the same effective area and PDFs to simulated our event, +# then we should get the "perfect" result. There will be statistical fluctuations +# resulting in a statistical error, but no systematic error. 
+ +expectation = ExpectationFromLineInSCFrame(data, + irf=irf_unpol, + flux=source_flux, + duration=duration, + energy=source_energy, + direction=source_direction, + polarized_irf=irf_pol, + polarization_degree=source_pd, + polarization_angle=source_pa, + polarization_convention=pol_convention) + +likelihood = UnbinnedLikelihood(expectation) + +# ==== Fits ==== + +# We'll use a brute-force maximum-likelihood estimation technique. That is, we'll compute +# likelihood as a function of all free parameters, get the combination that maximizes the likelihood, +# and use Wilks theorem to obtain an estimate of the errors. + +# We'll only free one parameter at a time, and set all others to known values. +# The flux will always be a "nuisance" parameter + +fit_energy = True +fit_direction = True +direction_nside = 128 # Decrease/increase it to get a better/worse TS map. It'll be faster/slower +fit_pa_pd = True + +# ==== Free the source energy ==== +if fit_energy: + # Set everything to the injection values + expectation.set_model(flux=source_flux, + energy=source_energy, + direction=source_direction, + polarization_degree=source_pd, + polarization_angle=source_pa) + + # Compute the likelihood on a grid + loglike = Histogram([Axis(np.linspace(.8, 1.2, 11)/u.cm/u.cm/u.s, label = 'flux'), + Axis(np.linspace(499, 501, 10)*u.keV, label = 'Ei')]) + + for j, ei_j in tqdm(list(enumerate(loglike.axes['Ei'].centers)), desc="Likelihood (free energy)"): + for i,flux_i in enumerate(loglike.axes['flux'].centers): + + expectation.set_model(flux = flux_i, energy = ei_j) + loglike[i,j] = likelihood.get_log_like() + + # Use Wilks theorem to get a 90% confidence interval + ts = 2 * (loglike - np.max(loglike)) + ax,_ = ts.plot(vmin = -4.61) + ax.scatter(source_flux.to_value(loglike.axes['flux'].unit), source_energy.to_value(loglike.axes['Ei'].unit), + color='red') + ax.get_figure().axes[-1].set_ylabel("TS") + + plt.show() + + +# ==== Free the source direction ==== +if fit_direction: + # Set 
everything to the injection values + expectation.set_model(flux=source_flux, + energy=source_energy, + direction=source_direction, + polarization_degree=source_pd, + polarization_angle=source_pa) + + loglike = Histogram([Axis(np.linspace(.8, 1.2, 11)/u.cm/u.cm/u.s, label = 'flux'), + HealpixAxis(nside = direction_nside, label = 'direction', coordsys=SpacecraftFrame())]) + + loglike[:] = np.nan + + sample_pixels = loglike.axes['direction'].query_disc(source_direction.cartesian.xyz, np.deg2rad(3)) + for j, pix in tqdm(list(enumerate(sample_pixels)), desc="Likelihood (direction)"): + + coord_pix = loglike.axes['direction'].pix2skycoord(pix) + + for i,flux_i in enumerate(loglike.axes['flux'].centers): + + expectation.set_model(flux = flux_i, direction = coord_pix) + + loglike[i,pix] = likelihood.get_log_like() + + + fig,ax = plt.subplots(1,2, figsize = [10,4]) + + ax[0].set_axis_off() # Replace corner plot with axis suitable for spherical data + sph_ax = fig.add_subplot(1, 2, 1, projection='cartview', latra = source_direction.lat.deg + [-3,3], lonra = source_direction.lon.deg + np.asarray([-3,3])/np.cos(source_direction.lat.rad)) + sph_ax.coords[0].set_ticks_visible(True) + sph_ax.coords[1].set_ticks_visible(True) + sph_ax.coords[0].set_ticklabel_visible(True) + sph_ax.coords[1].set_ticklabel_visible(True) + + direction_profile_loglike = HealpixMap(np.nanmax(loglike, axis = 0)) + ts_direction = 2*(direction_profile_loglike - np.nanmax(direction_profile_loglike)) + ts_direction.plot(sph_ax, vmin = -4.61) + sph_ax.set_title("Location TS map") + sph_ax.get_figure().axes[-1].set_xlabel("TS") + sph_ax.scatter(source_direction.lon.deg, source_direction.lat.deg, transform=sph_ax.get_transform('world'), + marker='x', s=100, c='red') + + flux_prof_loglike = Histogram(loglike.axes['flux'], contents = np.nanmax(loglike, axis = 1)) + ts_flux = 2*(flux_prof_loglike - np.nanmin(flux_prof_loglike)) + ts_flux.plot(ax[1]) + ax[1].axvline(source_flux.to_value(loglike.axes['flux'].unit), 
color = 'red', ls = ':') + ax[1].set_title("Flux TS profile") + + plt.show() + +# ==== Free PD and PA ==== +if fit_pa_pd: + # Set everything to the injection values + expectation.set_model(flux=source_flux, + energy=source_energy, + direction=source_direction, + polarization_degree=source_pd, + polarization_angle=source_pa) + + loglike = Histogram([Axis(np.linspace(.8, 1.2, 11) / u.cm / u.cm / u.s, label='flux'), + Axis(np.linspace(40,120,10)*u.deg, label='PA'), + Axis(np.linspace(0, 1, 11), label='PD'), + ]) + + for j, pa_j in tqdm(list(enumerate(loglike.axes['PA'].centers)), desc="Likelihood (polarization)"): + for k, pd_k in enumerate(loglike.axes['PD'].centers): + for i,flux_i in enumerate(loglike.axes['flux'].centers): + + expectation.set_model(flux = flux_i, + polarization_degree = pd_k, + polarization_angle = pa_j) + + loglike[i,j,k] = likelihood.get_log_like() + + fig,ax = plt.subplots(1,2, figsize = [10,4]) + + flux_prof_loglike = Histogram(loglike.axes['flux'], contents = np.nanmax(loglike, axis = (1,2))) + ts_flux = 2 * (flux_prof_loglike - np.nanmin(flux_prof_loglike)) + ts_flux.plot(ax[0]) + ax[0].axvline(source_flux.to_value(loglike.axes['flux'].unit), color='red', ls=':') + ax[0].set_ylabel("TS") + ax[0].set_title("Flux TS profile") + + pol_prof_loglike = Histogram([loglike.axes['PA'], loglike.axes['PD']], contents=np.nanmax(loglike, axis=0)) + + ts_pol = 2 * (pol_prof_loglike - np.nanmax(pol_prof_loglike)) + ts_pol.plot(ax[1], vmin=-4.61) + ax[1].scatter(source_pa.to_value(u.deg), source_pd, color='red') + ax[1].get_figure().axes[-1].set_ylabel("TS") + ax[1].set_title("PA-PD TS profile") + + plt.show() + +plt.show() \ No newline at end of file diff --git a/docs/api/interfaces/examples/toy/toy_implementations.py b/docs/api/interfaces/examples/toy/toy_implementations.py new file mode 100644 index 00000000..b36ca8d4 --- /dev/null +++ b/docs/api/interfaces/examples/toy/toy_implementations.py @@ -0,0 +1,351 @@ +import itertools +from typing import Dict, 
Iterator, Iterable, Optional, Type + +from astromodels.sources import Source +import astropy.units as u +from astropy.time import Time +from astropy.units import Quantity + +from cosipy.interfaces.background_interface import BackgroundDensityInterface +from cosipy.interfaces.data_interface import DataInterface, TimeTagEventDataInterface +from cosipy.interfaces.event_selection import EventSelectorInterface + +from cosipy.interfaces import (BinnedDataInterface, + BinnedBackgroundInterface, + BinnedThreeMLModelFoldingInterface, + BinnedThreeMLSourceResponseInterface, + UnbinnedThreeMLSourceResponseInterface, + UnbinnedThreeMLModelFoldingInterface, + EventInterface, + ThreeMLSourceResponseInterface, + TimeTagEventInterface) + +from histpy import Axis, Axes, Histogram +import numpy as np +from scipy.stats import norm, uniform + +from threeML import Constant, PointSource, Model, JointLikelihood, DataList + +from matplotlib import pyplot as plt + +import copy + +""" +This is an example on how to use the new interfaces. + +To keep things simple, example itself is a toy model. +It a 1D model, with a Gaussian signal on top of a flat +uniform background. You can execute it until the end +to see a plot on how it looks like. + +It looks nothing like COSI data, but +shows how generic the interfaces can be. 
+""" + +# ======== Create toy interfaces for this model =========== +class ToyEvent(TimeTagEventInterface, EventInterface): + """ + Unit-less 1D data of a measurement called "x" (could be anything) + """ + + data_space_units = u.s + + def __init__(self, index:int, x:float, time:Time): + self._id = index + self._x = x + self._jd1 = time.jd1 + self._jd2 = time.jd2 + + @property + def id(self): + return self._id + + @property + def x(self): + return self._x + + @property + def jd1(self): + return self._jd1 + + @property + def jd2(self): + return self._jd2 + +class ToyData(DataInterface): + pass + +class ToyEventDataStream(ToyData): + # This simulates reading event from file + # Check that they are not being read twice + + def __init__(self, nevents_signal, nevents_bkg, min_value, max_value, tstart, tstop): + + rng = np.random.default_rng() + + signal = rng.normal(size=nevents_signal) + bkg = rng.uniform(min_value, max_value, size=nevents_bkg) + + self._x = np.append(signal, bkg) + + self._tstart = tstart + self._tstop = tstop + + dt = np.random.uniform(size=self._x.size) + dt_sort = np.argsort(dt) + self._x = self._x[dt_sort] + dt = dt[dt_sort] + + self._timestamps = self._tstart + dt * u.day + + def __iter__(self) -> Iterator[ToyEvent]: + print("Loading events!") + for n,(x,t) in enumerate(zip(self._x, self._timestamps)): + yield ToyEvent(n,x,t) + +class ToyEventData(TimeTagEventDataInterface, ToyData): + # Random data. Normal signal on top of uniform bkg + + event_type = ToyEvent + + def __init__(self, loader:ToyEventDataStream, selector:EventSelectorInterface = None): + + self._loader = selector(loader) + self._cached_iter = None + self._nevents = None # After selection + + def __iter__(self) -> Iterator[ToyEvent]: + + if self._cached_iter is None: + # First call. Split. 
Keep one and return the other + self._loader, self._cached_iter = itertools.tee(self._loader) + return self._cached_iter + else: + # Following calls: tee the loader again + self._loader, new_iter = itertools.tee(self._loader) + return new_iter + + @property + def nevents(self) -> int: + if self._nevents is None: + # Not cached yet + self._nevents = sum(1 for _ in self) + + return self._nevents + + @property + def x(self): + return np.asarray([e.x for e in self]) + + @property + def jd1(self) -> Iterable[float]: + return np.asarray([e.jd1 for e in self]) + + @property + def jd2(self) -> Iterable[float]: + return np.asarray([e.jd2 for e in self]) + +class ToyBinnedData(BinnedDataInterface, ToyData): + + def __init__(self, data:Histogram): + + if data.ndim != 1: + raise ValueError("ToyBinnedData only take a 1D histogram") + + if data.axis.label != 'x': + raise ValueError("ToyBinnedData requires an axis labeled 'x'") + + self._data = data + + @property + def data(self) -> Histogram: + return self._data + + @property + def axes(self) -> Axes: + return self._data.axes + + def fill(self, event_data:Iterable[ToyEvent]): + + x = np.fromiter([e.x for e in event_data], dtype = float) + + self._data.fill(x) + +class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): + """ + Models a uniform background + + # Since the interfaces are Protocols, they don't *have* + # to derive from the base class, but doing some helps + # code readability, especially if you use an IDE. 
+ """ + + def __init__(self, data: ToyEventData, duration:Quantity, axis:Axis): + + self._data = data + self._duration = duration.to_value(u.s) + self._unit_expectation = Histogram(axis) + self._unit_expectation[:] = self._duration / self._unit_expectation.nbins + self._norm = 1 # Hz + + self._unit_expectation_density = self._duration / (axis.hi_lim - axis.lo_lim) + + @property + def event_type(self) -> Type[EventInterface]: + return ToyEvent + + def set_parameters(self, **parameters:u.Quantity) -> None: + self._norm = parameters['norm'].to_value(u.Hz) + + def expected_counts(self) -> float: + return self._norm * self._duration + + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + + for _ in itertools.islice(self._data, start, stop): + yield self._norm * self._unit_expectation_density + + @property + def parameters(self) -> Dict[str, u.Quantity]: + return {'norm': u.Quantity(self._norm, u.Hz)} + + def expectation(self, axes:Axes, copy = True) -> Histogram: + + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") + + # Always a copy + return self._unit_expectation * self._norm + +class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThreeMLSourceResponseInterface): + """ + This models a Gaussian signal in 1D, centered at 0 and with std = 1. 
+ The normalization --the "flux"-- is the only free parameters + """ + + def __init__(self, data: ToyEventData, duration:Quantity, axis:Axis): + self._data = data + self._source = None + self._duration = duration.to_value(u.s) + self._unit_expectation = Histogram(axis, + contents= self._duration * np.diff(norm.cdf(axis.edges))) + + @property + def event_type(self) -> Type[EventInterface]: + return ToyEvent + + def expected_counts(self) -> float: + + if self._source is None: + raise RuntimeError("Set a source first") + + # Get the latest values of the flux + # Remember that _model can be modified externally between calls. + # This response doesn't have effective area or energy sensitivity. We're just using K as a rate + ns_events = self._duration * self._source.spectrum.main.shape.k.value + return ns_events + + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + + cache = norm.pdf([event.x for event in itertools.islice(self._data, start, stop)]) + + for prob in cache: + yield prob + + # Alternative version without cache (slower) + # for event in itertools.islice(self._data, start, stop): + # yield norm.pdf(event.x) + + def set_source(self, source: Source): + + if not isinstance(source, PointSource): + raise TypeError("I only know how to handle point sources!") + + self._source = source + + def expectation(self, axes:Axes, copy = True) -> Histogram: + + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") + + if self._source is None: + raise RuntimeError("Set a source first") + + # Get the latest values of the flux + # Remember that _model can be modified externally between calls. + # Always copies + return self._unit_expectation * self._source.spectrum.main.shape.k.value + + def copy(self) -> "ToyPointSourceResponse": + # We are not caching any results, so it's safe to do shallow copy without + # re-initializing any member. 
+ return copy.copy(self) + +class ToyModelFolding(BinnedThreeMLModelFoldingInterface, UnbinnedThreeMLModelFoldingInterface): + + def __init__(self, data:ToyEventData, psr: ToyPointSourceResponse): + + self._data = data + self._model = None + + self._psr = psr + self._psr_copies = {} + + @property + def event_type(self): + return ToyEvent + + def expected_counts(self) -> float: + + ncounts = 0 + + for source_name,psr in self._psr_copies.items(): + ncounts += psr.expected_counts() + + return ncounts + + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + + self._cache_psr_copies() + + if not self._psr_copies: + for _ in itertools.islice(self._data, start, stop): + yield 0 + else: + for expectation in zip(*[p.expectation_density() for p in self._psr_copies.values()]): + yield np.sum(expectation) + + def set_model(self, model: Model): + + self._model = model + + def _cache_psr_copies(self): + + new_psr_copies = {} + + for name,source in self._model.sources.items(): + + if name in self._psr_copies: + # Use cache + new_psr_copies[name] = self._psr_copies[name] + + psr_copy = self._psr.copy() + psr_copy.set_source(source) + + new_psr_copies[name] = psr_copy + + self._psr_copies = new_psr_copies + + def expectation(self, axes:Axes, copy = True) -> Histogram: + + self._cache_psr_copies() + + expectation = Histogram(axes) + + for source_name,psr in self._psr_copies.items(): + expectation += psr.expectation(axes, copy = False) + + # Always a copy + return expectation + +# ======= Actual code. 
This is how the "tutorial" will look like ================ \ No newline at end of file diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py new file mode 100644 index 00000000..e1119c8e --- /dev/null +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -0,0 +1,170 @@ +from toy_implementations import * + +from astromodels import Parameter +import astropy.units as u +from astropy.time import Time +from cosipy.event_selection.time_selection import TimeSelector +from cosipy.interfaces.expectation_interface import SumExpectationDensity + +from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood + +from cosipy.interfaces import ThreeMLPluginInterface +from histpy import Axis, Histogram +import numpy as np + +from threeML import Constant, PointSource, Model, JointLikelihood, DataList + +from matplotlib import pyplot as plt + + +def main(): + + # This axis is user for binning the data in the binned analysis case + # The unbinned analysis also uses to lower and upper limits, as well as for plotting + toy_axis = Axis(np.linspace(-5, 5), label='x') + + # Some options + unbinned = False # Binned=False or unbinned=True + plot = True # Plots the fit + use_signal = True # False = bkg-only + use_bkg = True # False = signal-only + + # This simulates a stream of events. It can come from a file or some other source + # ToyEventDataStream and ToyEventData could have been simplified into a single + # class, but I wanted to exercise the case of a consumable stream, which is + # cached by ToyEventData and used in the rest of the analysis. + # The event have an 'x' value and time. + # For the signal, the 'x' values are randomly drawn from a standard normal distribution + # For the background, the 'x' value are randomly drawn from a uniform distribution + # The timestamps are randomly drawn from a uniform distribution in both cases. + # All the events are time-sorted. 
+ data_loader = ToyEventDataStream(nevents_signal= 1000 if use_signal else 0, + nevents_bkg= 1000 if use_bkg else 0, + min_value=toy_axis.lo_lim, + max_value=toy_axis.hi_lim, + tstart=Time("2000-01-01T00:00:00"), + tstop=Time("2000-01-02T00:00:00")) + + # Make a selection. A simple time selection in this case + # TimeSelector assumed the events are time-sorted and will stop the stream + # of events once tstop is reached + tstart = Time("2000-01-01T01:00:00") + tstop = Time("2000-01-01T10:00:00") + duration = tstop - tstart + selector = TimeSelector(tstart = tstart, tstop = tstop) + + event_data = ToyEventData(data_loader, selector=selector) + + # This is the expectation from a single source, which is just the standard normal + # distribution + # This class handles both the binned and the unbinned case. + psr = ToyPointSourceResponse(data = event_data, duration = duration, axis = toy_axis) + + # This combines the expectation from multiple + model_folding = ToyModelFolding(data = event_data, psr = psr) + + if use_bkg: + # The expectation from background, which is flat + # This class handles both the binned and the unbinned case + bkg = ToyBkg(data = event_data, duration = duration, axis = toy_axis) + else: + bkg = None + + # Source model + # Since this is a toy model with no position or energy dependence, + # we'll just use the normalization K value and ignore the units + # The default units are 1 / (keV s cm2), which make sure for an astrophysical + # source, but for this toy model. 
+ spectrum = Constant() + + if use_signal: + spectrum.k.value = .01 + else: + spectrum.k.value = 0 + spectrum.k.free = False + + spectrum.k.min_value = 0 + + source = PointSource("arbitrary_source_name", + l=0, b=0, # Doesn't matter + spectral_shape=spectrum) + + model = Model(source) + + # Data binning true the interface fill() method + binned_data = None + if plot or not unbinned: + binned_data = ToyBinnedData(Histogram(toy_axis)) + binned_data.fill(event_data) + + # Set the likelihood function we'll use + if unbinned: + expectation_density = SumExpectationDensity(model_folding, bkg) + like_fun = UnbinnedLikelihood(expectation_density) + else: + like_fun = PoissonLikelihood(binned_data, model_folding, bkg) + + + # Initiate the 3ML plugin + # This plugin will internally call + # response.set_model() and bkg.set_parameter() + # which will cause the like_fun result to change on each call + cosi = ThreeMLPluginInterface('cosi', + like_fun, + response = model_folding, + bkg = bkg) + + # Before the fit, you can set the background parameters initial values, bounds, etc. + # This is passed to the minimizer. 
+ # The source model parameters were already set above + if bkg is not None: + cosi.bkg_parameter['norm'] = Parameter("norm", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=1, # maximum value of parameter + delta=0.001, # initial step used by fitting engine + free = True) + + # Fit + plugins = DataList(cosi) # Each instrument or data set + like = JointLikelihood(model, plugins) # Everything connects here + + like.fit() + print(like.minimizer) + + # Plot results + if plot: + + fig, ax = plt.subplots() + + + if unbinned: + # Divide by bin width to plot the density + (binned_data.data/toy_axis.widths).plot(ax) + + # Get the expectation density from the fitted result for each event + expectation_density_list = np.fromiter(expectation_density.expectation_density(), dtype=float) + ax.scatter(event_data.x, expectation_density_list, s=1, color='green') + + ax.set_ylabel("Counts density") + else: + binned_data.data.plot(ax) + expectation = model_folding.expectation(binned_data.axes) + + if bkg is not None: + expectation = expectation + bkg.expectation(binned_data.axes) + + expectation.plot(ax) + + ax.set_ylabel("Counts") + + plt.show() + +if __name__ == "__main__": + + import cProfile + cProfile.run('main()', filename = "prof_toy.prof") + exit() + + main() \ No newline at end of file diff --git a/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb b/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb index 692c0437..38060ac7 100644 --- a/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb +++ b/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "e40c9735", "metadata": { "tags": [] }, @@ -22,6 +23,7 @@ { "cell_type": "code", "execution_count": 1, + "id": "8a0f4b8b", "metadata": { "tags": [] }, @@ 
-266,7 +268,7 @@ ], "source": [ "from cosipy.background_estimation import ContinuumEstimation\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.util import fetch_wasabi_file\n", "import os\n", "import logging\n", @@ -280,6 +282,7 @@ }, { "cell_type": "markdown", + "id": "22076637", "metadata": {}, "source": [ "The notebook requires the following files:\n", @@ -294,6 +297,7 @@ { "cell_type": "code", "execution_count": null, + "id": "a1e669ed", "metadata": {}, "outputs": [], "source": [ @@ -304,6 +308,7 @@ { "cell_type": "code", "execution_count": null, + "id": "36497975", "metadata": { "tags": [] }, @@ -316,6 +321,7 @@ { "cell_type": "code", "execution_count": null, + "id": "6dafa64a", "metadata": {}, "outputs": [], "source": [ @@ -328,6 +334,7 @@ { "cell_type": "code", "execution_count": null, + "id": "ec8e59c4", "metadata": {}, "outputs": [], "source": [ @@ -338,6 +345,7 @@ { "cell_type": "code", "execution_count": null, + "id": "e14f1bfc", "metadata": {}, "outputs": [], "source": [ @@ -347,6 +355,7 @@ }, { "cell_type": "markdown", + "id": "42d1d040", "metadata": {}, "source": [ "Define instance of class:" @@ -355,6 +364,7 @@ { "cell_type": "code", "execution_count": 2, + "id": "6ade7276", "metadata": { "tags": [] }, @@ -365,6 +375,7 @@ }, { "cell_type": "markdown", + "id": "78da7f08", "metadata": {}, "source": [ "In order to estimate the background, we need the point source response. If you don't already have this, you can calculate it, as shown below. Note that the coordinates of the Crab need to be passed as a tuple, giving Galactic longitude and latitude in degrees. 
" @@ -373,6 +384,7 @@ { "cell_type": "code", "execution_count": 3, + "id": "7cf312d4", "metadata": { "tags": [] }, @@ -382,7 +394,7 @@ "ori_file = data_path/\"DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori\"\n", "\n", "# Spacecraft orientation:\n", - "sc_orientation = SpacecraftFile.parse_from_file(ori_file)\n", + "sc_orientation = SpacecraftHistory.parse_from_file(ori_file)\n", "\n", "crab = SkyCoord(l=184.56*u.deg,b=-5.78*u.deg,frame=\"galactic\")\n", "psr = instance.calc_psr(sc_orientation, dr, crab)\n", @@ -393,6 +405,7 @@ }, { "cell_type": "markdown", + "id": "0f665d91", "metadata": {}, "source": [ "Now let's calculate the estimated background. To make a short example, we'll only consider 1 Em bin and 2 Phi bins, as specified by the optional keywords e_loop and s_loop, respectively. We'll also make plots here for demonstrational purposes. \n", @@ -403,6 +416,7 @@ { "cell_type": "code", "execution_count": 4, + "id": "71126d13", "metadata": { "tags": [] }, @@ -558,6 +572,7 @@ }, { "cell_type": "markdown", + "id": "ca992944", "metadata": {}, "source": [ "Finaly, let's save the estimated background to file:" @@ -566,6 +581,7 @@ { "cell_type": "code", "execution_count": 5, + "id": "5deeee04", "metadata": { "tags": [] }, diff --git a/docs/tutorials/index.rst b/docs/tutorials/index.rst index 8fb7cbcf..a6fd4925 100644 --- a/docs/tutorials/index.rst +++ b/docs/tutorials/index.rst @@ -16,7 +16,7 @@ List of tutorials and contents, as a link to the corresponding Python notebook i - Combining files. - Inspecting and plotting the data -2. Spacecraft orientation and location `(ipynb) `_ +2. Spacecraft orientation and location `(ipynb) `_ - SC file format and manipulation it —e.g. get a time range, rebin it. 
- The dwell time map and how to obtain it @@ -79,7 +79,7 @@ List of tutorials and contents, as a link to the corresponding Python notebook i :maxdepth: 1 Data format and handling - response/SpacecraftFile.ipynb + response/SpacecraftHistory.ipynb Detector response and signal expectation TS Map: localizing a GRB Fitting the spectrum of a GRB diff --git a/docs/tutorials/polarization/ASAD_method.ipynb b/docs/tutorials/polarization/ASAD_method.ipynb index 9c7f8302..dbfa9aaf 100644 --- a/docs/tutorials/polarization/ASAD_method.ipynb +++ b/docs/tutorials/polarization/ASAD_method.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "1abbf1f4", "metadata": {}, "source": [ "# Polarization example - azimuthal scattering angle distribution (ASAD) method" @@ -9,6 +10,7 @@ }, { "cell_type": "markdown", + "id": "6d648ec0", "metadata": {}, "source": [ "This notebook fits the polarization fraction and angle of a Data Challenge 3 GRB (GRB 080802386) simulated using MEGAlib and combined with albedo photon background. It's assumed that the start time, duration, localization, and spectrum of the GRB are already known. The GRB was simulated with 80% polarization at an angle of 90 degrees in the IAU convention, and was 20 degrees off-axis. A detailed description of the ASAD method, which is the approach used here to fit the polarization, is available on the [Data Challenge repository](https://github.com/cositools/cosi-data-challenges/tree/main/polarization). 
" @@ -17,11 +19,12 @@ { "cell_type": "code", "execution_count": 9, + "id": "68124bc7", "metadata": {}, "outputs": [], "source": [ "from cosipy import UnBinnedData\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.polarization.conventions import MEGAlibRelativeX, MEGAlibRelativeY, MEGAlibRelativeZ, IAUPolarizationConvention\n", "from cosipy.polarization.polarization_asad import PolarizationASAD\n", "from cosipy.threeml.custom_functions import Band_Eflux\n", @@ -36,6 +39,7 @@ }, { "cell_type": "markdown", + "id": "78c20573", "metadata": {}, "source": [ "### Download and read in data" @@ -43,6 +47,7 @@ }, { "cell_type": "markdown", + "id": "da42f4a1", "metadata": {}, "source": [ "This will download the files needed to run this notebook. If you have already downloaded these files, you can skip this." @@ -50,6 +55,7 @@ }, { "cell_type": "markdown", + "id": "ccc0b0fe", "metadata": {}, "source": [ "Download the unbinned data (660.58 KB)" @@ -58,6 +64,7 @@ { "cell_type": "code", "execution_count": null, + "id": "3085a17d", "metadata": {}, "outputs": [], "source": [ @@ -66,6 +73,7 @@ }, { "cell_type": "markdown", + "id": "f606b6a8", "metadata": {}, "source": [ "Download the polarization response (217.47 MB)" @@ -74,6 +82,7 @@ { "cell_type": "code", "execution_count": null, + "id": "556b7980", "metadata": {}, "outputs": [], "source": [ @@ -82,6 +91,7 @@ }, { "cell_type": "markdown", + "id": "392ea1ed", "metadata": {}, "source": [ "Download the orientation file (1.10 GB)" @@ -90,6 +100,7 @@ { "cell_type": "code", "execution_count": null, + "id": "d08d00dd", "metadata": {}, "outputs": [], "source": [ @@ -98,6 +109,7 @@ }, { "cell_type": "markdown", + "id": "07cdd9d0", "metadata": {}, "source": [ "Read in the unbinned data, which is a GRB placed within albedo photon background. A time cut is done for the duration of the GRB to produce the GRB+background data to fit. 
The time intervals before and after the GRB are used to produce a background model. An energy cut is applied to both the data and background model to match the energy range of the detector response. Binned data can also be used for the ASAD method for both the data and background model." @@ -106,6 +118,7 @@ { "cell_type": "code", "execution_count": null, + "id": "c33c31ca", "metadata": {}, "outputs": [], "source": [ @@ -130,6 +143,7 @@ }, { "cell_type": "markdown", + "id": "322f3ac7", "metadata": {}, "source": [ "Define the path to the detector response and read in the orientation file. The orientation is cut down to the time interval of the source." @@ -138,17 +152,19 @@ { "cell_type": "code", "execution_count": null, + "id": "5b9bdf94", "metadata": {}, "outputs": [], "source": [ "response_file = data_path/'ResponseContinuum.o3.pol.e200_10000.b4.p12.relx.s10396905069491.m420.filtered.binnedpolarization.11D.h5' # e.g. ResponseContinuum.o3.pol.e200_10000.b4.p12.s10396905069491.m441.filtered.binnedpolarization.11D.h5\n", "\n", - "sc_orientation = SpacecraftFile.parse_from_file(data_path/'DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori') # e.g. DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori\n", + "sc_orientation = SpacecraftHistory.parse_from_file(data_path/'DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori') # e.g. DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori\n", "sc_orientation = sc_orientation.source_interval(Time(1835493492.2, format = 'unix'), Time(1835493492.8, format = 'unix'))" ] }, { "cell_type": "markdown", + "id": "b16cdbf3", "metadata": {}, "source": [ "Define the GRB position and spectrum." 
@@ -157,6 +173,7 @@ { "cell_type": "code", "execution_count": 15, + "id": "9e6752e3", "metadata": {}, "outputs": [], "source": [ @@ -184,6 +201,7 @@ }, { "cell_type": "markdown", + "id": "940322cc", "metadata": {}, "source": [ "### Polarization fit in ICRS frame" @@ -191,6 +209,7 @@ }, { "cell_type": "markdown", + "id": "466d4c44", "metadata": {}, "source": [ "Define the azimuthal scattering angle bin edges, and create the polarization object. A number of steps are done when this object is created:\n", @@ -205,6 +224,7 @@ { "cell_type": "code", "execution_count": null, + "id": "13f08813", "metadata": {}, "outputs": [ { @@ -386,6 +406,7 @@ }, { "cell_type": "markdown", + "id": "d4e0ecdb", "metadata": {}, "source": [ "Perform the fit in the ICRS frame. A sinusoid is fit to the GRB ASAD, and the amplitude and phase are used to determine the polarization fraction and angle, respectively. Because the polarization angles are distributed uniformly within the polarization angle bins of the response, and the bins are coarse, the $\\mu_{100}$ is underestimated, causing the fitted polarization fraction of the source to be overestimated." @@ -394,6 +415,7 @@ { "cell_type": "code", "execution_count": 19, + "id": "778a1d0a", "metadata": {}, "outputs": [ { @@ -426,6 +448,7 @@ }, { "cell_type": "markdown", + "id": "b031f3ba", "metadata": {}, "source": [ "### Polarization fit in spacecraft coordinates" @@ -433,6 +456,7 @@ }, { "cell_type": "markdown", + "id": "816a2b26", "metadata": {}, "source": [ "For short duration sources, such as this short GRB, the spacecraft does not move much, allowing the fit to also be done in spacecraft coordinates. This cell determines the spacecraft attitude at the beginning of the GRB and transform the source position to the spacecraft frame. Then, the polarization object is generated with the convention for the fit specified as the same convention used to generate the detector response, which is RelativeZ in this case. 
The ASADs are now generated in the spacecraft frame, instead of the ICRS frame as they were above.\n" @@ -441,6 +465,7 @@ { "cell_type": "code", "execution_count": null, + "id": "e1b988b6", "metadata": {}, "outputs": [ { @@ -642,6 +667,7 @@ }, { "cell_type": "markdown", + "id": "5302bed2", "metadata": {}, "source": [ "Perform the fit in spacecraft coordinates." @@ -650,6 +676,7 @@ { "cell_type": "code", "execution_count": 21, + "id": "aab1ad2c", "metadata": {}, "outputs": [ { @@ -682,6 +709,7 @@ }, { "cell_type": "markdown", + "id": "a182e3c0", "metadata": {}, "source": [ "The fitted polarization angle is always output in the IAU convention, but can easily be transformed to other conventions." @@ -690,6 +718,7 @@ { "cell_type": "code", "execution_count": 22, + "id": "74282297", "metadata": {}, "outputs": [ { diff --git a/docs/tutorials/response/DetectorResponse.ipynb b/docs/tutorials/response/DetectorResponse.ipynb index 719dc0ae..f375399a 100644 --- a/docs/tutorials/response/DetectorResponse.ipynb +++ b/docs/tutorials/response/DetectorResponse.ipynb @@ -48,7 +48,7 @@ "\n", "from scoords import Attitude, SpacecraftFrame\n", "from cosipy.response import FullDetectorResponse\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy import test_data\n", "from cosipy.util import fetch_wasabi_file\n", "from histpy import Histogram\n", @@ -453,7 +453,7 @@ ], "source": [ "# Read the full orientation\n", - "ori = SpacecraftFile.parse_from_file(ori_path)\n", + "ori = SpacecraftHistory.parse_from_file(ori_path)\n", "\n", "# Define the target coordinates (Crab)\n", "target_coord = SkyCoord(184.5551, -05.7877, unit = \"deg\", frame = \"galactic\")\n", @@ -814,7 +814,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "You can also convert the point source response to XSPEC readable files (arf, rmf and pha) if you want to do spectral fitting or simulation in XSPEC. 
See the `SpacecraftFile` class functions `get_arf()`, `get_rmf()` and `get_pha()`, respectively." + "You can also convert the point source response to XSPEC readable files (arf, rmf and pha) if you want to do spectral fitting or simulation in XSPEC. See the `SpacecraftHistory` class functions `get_arf()`, `get_rmf()` and `get_pha()`, respectively." ] }, { diff --git a/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb b/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb index eada0be4..0a2c79af 100644 --- a/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb +++ b/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "2a6bafbd", "metadata": {}, "source": [ "# Point Source Response with Earth Occultation\n", @@ -18,6 +19,7 @@ { "cell_type": "code", "execution_count": 1, + "id": "518224fe", "metadata": {}, "outputs": [ { @@ -275,7 +277,7 @@ ], "source": [ "# Imports\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.response import FullDetectorResponse\n", "from cosipy.util import fetch_wasabi_file\n", "import astropy.units as u\n", @@ -285,6 +287,7 @@ }, { "cell_type": "markdown", + "id": "eba8841c", "metadata": {}, "source": [ "Load orientation file:" @@ -293,15 +296,17 @@ { "cell_type": "code", "execution_count": 4, + "id": "259bd69a", "metadata": {}, "outputs": [], "source": [ "ori_file = \"your/path/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth.ori\"\n", - "ori = SpacecraftFile.parse_from_file(ori_file)" + "ori = SpacecraftHistory.parse_from_file(ori_file)" ] }, { "cell_type": "markdown", + "id": "2c4dca26", "metadata": {}, "source": [ "Define coordinate of source (Crab):" @@ -310,6 +315,7 @@ { "cell_type": "code", "execution_count": 5, + "id": "fd7da129", "metadata": {}, "outputs": [], "source": [ @@ -318,6 +324,7 @@ }, { "cell_type": "markdown", + "id": 
"94c95128", "metadata": {}, "source": [ "Calculate scatt map for given coordinate:" @@ -326,6 +333,7 @@ { "cell_type": "code", "execution_count": 7, + "id": "c13c4fb4", "metadata": {}, "outputs": [], "source": [ @@ -334,6 +342,7 @@ }, { "cell_type": "markdown", + "id": "a59d0ac1", "metadata": {}, "source": [ "Calculate psr:" @@ -342,6 +351,7 @@ { "cell_type": "code", "execution_count": 8, + "id": "14c8ad41", "metadata": {}, "outputs": [ { @@ -362,6 +372,7 @@ }, { "cell_type": "markdown", + "id": "726158b0", "metadata": {}, "source": [ "### Now let's do a simple test:\n", @@ -371,16 +382,18 @@ { "cell_type": "code", "execution_count": 9, + "id": "7e0d7492", "metadata": {}, "outputs": [], "source": [ "ori_file = \"your/path/test_earth_occ.ori\"\n", - "ori = SpacecraftFile.parse_from_file(ori_file)" + "ori = SpacecraftHistory.parse_from_file(ori_file)" ] }, { "cell_type": "code", "execution_count": 10, + "id": "e33cd0db", "metadata": {}, "outputs": [], "source": [ @@ -390,6 +403,7 @@ { "cell_type": "code", "execution_count": 11, + "id": "2f862091", "metadata": {}, "outputs": [], "source": [ @@ -399,6 +413,7 @@ { "cell_type": "code", "execution_count": 12, + "id": "ea1116c4", "metadata": {}, "outputs": [ { diff --git a/docs/tutorials/response/SpacecraftFile.ipynb b/docs/tutorials/response/SpacecraftFile.ipynb index c6b1a472..ab497d0f 100644 --- a/docs/tutorials/response/SpacecraftFile.ipynb +++ b/docs/tutorials/response/SpacecraftFile.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "9ab76c4d", "metadata": {}, "source": [ "# Spacecraft file: attitude and position" @@ -9,21 +10,24 @@ }, { "cell_type": "markdown", + "id": "8300ba84", "metadata": {}, "source": [ - "The spacecraft is always moving and changing orientations. The attitude --i.e. orientation-- vs. time is handled by the SpacecraftFile class. This allows us to transform from spacecraft coordinates to inertial coordinate --e.g. galactics coordinates." 
+ "The spacecraft is always moving and changing orientations. The attitude --i.e. orientation-- vs. time is handled by the SpacecraftHistory class. This allows us to transform from spacecraft coordinates to inertial coordinate --e.g. galactics coordinates." ] }, { "cell_type": "markdown", + "id": "6675919e", "metadata": {}, "source": [ "
\n", - "Note: In future versions, the SpacecraftFile class will handle the spacecraft location --i.e. latitude, longitude, and altitude-- in addition to its attitude. This will allow us to know where the Earth is located in the field of view, which we are currently ignoring for simplicity.
" + "Note: In future versions, the SpacecraftHistory class will handle the spacecraft location --i.e. latitude, longitude, and altitude-- in addition to its attitude. This will allow us to know where the Earth is located in the field of view, which we are currently ignoring for simplicity." ] }, { "cell_type": "markdown", + "id": "61a35427", "metadata": {}, "source": [ "## Dependencies" @@ -32,6 +36,7 @@ { "cell_type": "code", "execution_count": 1, + "id": "51b4b0c3", "metadata": { "scrolled": true, "tags": [] @@ -54,12 +59,13 @@ "import os\n", "\n", "from cosipy.response import FullDetectorResponse\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.util import fetch_wasabi_file" ] }, { "cell_type": "markdown", + "id": "afda1ab0", "metadata": {}, "source": [ "## File downloads" @@ -67,6 +73,7 @@ }, { "cell_type": "markdown", + "id": "09b9e70d", "metadata": {}, "source": [ "You can skip this step if you already downloaded the files. Make sure that paths point to the right files" @@ -75,6 +82,7 @@ { "cell_type": "code", "execution_count": 2, + "id": "f79beeab", "metadata": { "tags": [] }, @@ -86,6 +94,7 @@ { "cell_type": "code", "execution_count": 3, + "id": "8b4e34f3", "metadata": { "tags": [] }, @@ -98,6 +107,7 @@ { "cell_type": "code", "execution_count": 4, + "id": "5d15bd45", "metadata": { "tags": [] }, @@ -112,6 +122,7 @@ }, { "cell_type": "markdown", + "id": "4ca6fea9", "metadata": {}, "source": [ "## Orientation file format and loading" @@ -119,6 +130,7 @@ }, { "cell_type": "markdown", + "id": "9485703e", "metadata": {}, "source": [ "The attitude os the spacecraft is specified by the galactic coordinates that the x and z axes of the spacecraft are pointing to. 
The y-axis pointing can be deduced from this information (right-handed system convention).\n", @@ -133,6 +145,7 @@ } }, "cell_type": "markdown", + "id": "dc12adcf", "metadata": {}, "source": [ "![Xnip2023-03-02_10-55-29.jpg](attachment:04c01833-6f41-4b2a-812f-56ce5790948d.jpg)" @@ -140,6 +153,7 @@ }, { "cell_type": "markdown", + "id": "f4e7f2c6", "metadata": {}, "source": [ "Currently, this information is stored in a text file with a filename ending in \".ori\", a format inherited from MEGALib. Each line contains the keyword \"OG\", followd by: time stamp (GPS seconds), x-axis galactic latitude (deg), x-axis galactic longitude (deg), z-axis galactic latitude (deg), z-axis galactic longitude (deg). " @@ -148,6 +162,7 @@ { "cell_type": "code", "execution_count": 5, + "id": "899325b9", "metadata": { "tags": [] }, @@ -187,28 +202,31 @@ }, { "cell_type": "markdown", + "id": "6fe2ed44", "metadata": {}, "source": [ "
\n", - "Note: The orientation (.ori) file format will change in the future, from a text file to a FITS file. However, the file contents and the capabilities of the SpacecraftFile class will be the same.
" + "Note: The orientation (.ori) file format will change in the future, from a text file to a FITS file. However, the file contents and the capabilities of the SpacecraftHistory class will be the same." ] }, { "cell_type": "markdown", + "id": "eabd09f6", "metadata": {}, "source": [ - "You don't have to remember the internal format though, just load it using the SpacecraftFile class:" + "You don't have to remember the internal format though, just load it using the SpacecraftHistory class:" ] }, { "cell_type": "code", "execution_count": 6, + "id": "83e5ac10", "metadata": { "tags": [] }, "outputs": [], "source": [ - "ori = SpacecraftFile.parse_from_file(ori_path)\n", + "ori = SpacecraftHistory.parse_from_file(ori_path)\n", "\n", "# Let's use only 1 hr in this example\n", "ori = ori.source_interval(ori.get_time()[0], ori.get_time()[0] + 1*u.hr)" @@ -216,6 +234,7 @@ }, { "cell_type": "markdown", + "id": "2ef82764", "metadata": {}, "source": [ "You can plot the pointings to see how the zenith changes over the observation. 
In this example, we'll plot only 1 hr:" @@ -224,6 +243,7 @@ { "cell_type": "code", "execution_count": 7, + "id": "d8e0e1b8", "metadata": { "tags": [] }, @@ -272,6 +292,7 @@ }, { "cell_type": "markdown", + "id": "55896ec8", "metadata": {}, "source": [ "## Calculate the source movement in the SC frame" @@ -279,6 +300,7 @@ }, { "cell_type": "markdown", + "id": "ea2bb02d", "metadata": {}, "source": [ "This converts a fixed coordinate in the galactic frame to the coordinate in the SC frame as a function of time:" @@ -287,6 +309,7 @@ { "cell_type": "code", "execution_count": 8, + "id": "040d8c91", "metadata": { "tags": [] }, @@ -302,6 +325,7 @@ { "cell_type": "code", "execution_count": 9, + "id": "50bf313f", "metadata": { "tags": [] }, @@ -335,6 +359,7 @@ }, { "cell_type": "markdown", + "id": "74e64371", "metadata": {}, "source": [ "## The dwell time map" @@ -342,6 +367,7 @@ }, { "cell_type": "markdown", + "id": "80a09513", "metadata": {}, "source": [ "Since the response of the instrument is a function of the local coordinates, we need to calculate the movement of the source in the spacecraft frame. This is achieved with the help of a \"dwell time map\", which contains the amount of time a given source spent in a particular location of the COSI field of view. This is then convolved with the instrument response to get the point source response. " @@ -350,6 +376,7 @@ { "cell_type": "code", "execution_count": 10, + "id": "0d722b77", "metadata": { "tags": [] }, @@ -371,6 +398,7 @@ }, { "cell_type": "markdown", + "id": "2a7485eb", "metadata": {}, "source": [ "Plot the dwell time map in detector coordinates. The top is the boresight of the instrument. Note that in this plot the longitude increases to the left." 
@@ -379,6 +407,7 @@ { "cell_type": "code", "execution_count": 11, + "id": "146a6160", "metadata": { "tags": [] }, @@ -400,6 +429,7 @@ }, { "cell_type": "markdown", + "id": "ca14496a", "metadata": {}, "source": [ "The dwell time map sums up to the total observed time:" @@ -408,6 +438,7 @@ { "cell_type": "code", "execution_count": 12, + "id": "bf0a77be", "metadata": { "tags": [] }, @@ -432,6 +463,7 @@ }, { "cell_type": "markdown", + "id": "a2404ae5", "metadata": {}, "source": [ "## The scatt map" @@ -439,6 +471,7 @@ }, { "cell_type": "markdown", + "id": "d5ff66e7", "metadata": {}, "source": [ "As the spacecraft rotates, a fixed source in the sky is seen by the detector from multiple direction. Convolving the dweel time map with the instrument response, without binning it simultenously in time, can wash out the signal. Since the spacecraft can have the same orientation multiple times, we avoid performing the same rotation multiple times by creating a histogram that keeps track of the attitude information. This is the \"spacecraft attitude map\" ---a.k.a scatt mapp--- which is a 4D matrix that contain the amount of time that the `x` and `y` SC axes were pointing at a given location in inertial coordinates -e.g. galactic." 
@@ -447,6 +480,7 @@ { "cell_type": "code", "execution_count": 13, + "id": "2dd82889", "metadata": { "tags": [] }, @@ -458,6 +492,7 @@ }, { "cell_type": "markdown", + "id": "61460c7d", "metadata": {}, "source": [ "This is a how the 2D projections looks like" @@ -466,6 +501,7 @@ { "cell_type": "code", "execution_count": 15, + "id": "afff73ff", "metadata": {}, "outputs": [ { @@ -502,6 +538,7 @@ { "cell_type": "code", "execution_count": null, + "id": "98b5b8e7", "metadata": {}, "outputs": [], "source": [] diff --git a/docs/tutorials/response/extended_source_response_generator.py b/docs/tutorials/response/extended_source_response_generator.py index f889a614..52e0213c 100644 --- a/docs/tutorials/response/extended_source_response_generator.py +++ b/docs/tutorials/response/extended_source_response_generator.py @@ -7,7 +7,7 @@ logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler(sys.stdout)) -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response import FullDetectorResponse, ExtendedSourceResponse # file path @@ -16,7 +16,7 @@ # load response and orientation full_detector_response = FullDetectorResponse.open(full_detector_response_path) -orientation = SpacecraftFile.parse_from_file(orientation_path) +orientation = SpacecraftHistory.parse_from_file(orientation_path) # generate your extended source response extended_source_response = full_detector_response.get_extended_source_response(orientation, diff --git a/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py b/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py index baeb039b..8d2ccef3 100644 --- a/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py +++ b/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py @@ -7,7 +7,7 @@ logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler(sys.stdout)) -from cosipy.spacecraftfile 
import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response import FullDetectorResponse, ExtendedSourceResponse # file path @@ -16,7 +16,7 @@ # load response and orientation full_detector_response = FullDetectorResponse.open(full_detector_response_path) -orientation = SpacecraftFile.parse_from_file(orientation_path) +orientation = SpacecraftHistory.parse_from_file(orientation_path) # set the healpix pixel index list ipix_image_list = [int(_) for _ in sys.argv[1:]] diff --git a/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py b/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py index 8e279e5c..a0e74547 100644 --- a/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py +++ b/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py @@ -7,7 +7,7 @@ logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler(sys.stdout)) -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response import FullDetectorResponse, ExtendedSourceResponse # load full detector response diff --git a/docs/tutorials/run_tutorials.py b/docs/tutorials/run_tutorials.py index 06c374ba..b1073b56 100755 --- a/docs/tutorials/run_tutorials.py +++ b/docs/tutorials/run_tutorials.py @@ -3,6 +3,8 @@ import logging import traceback +from nbclient.exceptions import CellExecutionError + logging.basicConfig(format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s', datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO) @@ -15,7 +17,7 @@ from pathlib import Path import nbformat -from nbconvert.preprocessors import ExecutePreprocessor +from nbconvert.preprocessors import ExecutePreprocessor, RegexRemovePreprocessor from nbconvert import HTMLExporter from nbconvert.writers import FilesWriter @@ -220,6 +222,7 @@ def run_tutorial(tutorial): os.symlink(local_copy, wdir/local_copy.name) # Run + failed = False if not args.dry: for notebook 
in notebooks: source_nb_path = config.absolute_path(notebook) @@ -228,13 +231,33 @@ def run_tutorial(tutorial): with (open(nb_path) as nb_file): nb = nbformat.read(nb_file, as_version=nbformat.NO_CONVERT) + # Remove magic, which can make a failing notebook look + # like it succeeded. + for cell in nb.cells: + if cell.cell_type == 'code': + source = cell.source.strip("\n").lstrip() + if len(source) >= 2 and source[:2] == "%%": + cell.source = cell.source.replace("%%", "#[magic commented out by run_tutorials.py]%%") + logger.info(f"Executing notebook {source_nb_path}...") start_time = timeit.default_timer() ep = ExecutePreprocessor(timeout=config['globals:timeout'], kernel_name=config['globals:kernel']) - ep_out = ep.preprocess(nb, {'metadata': {'path': str(wdir)}}) + + try: + ep_out = ep.preprocess(nb, {'metadata': {'path': str(wdir)}}) + except CellExecutionError as e: + # Will re-raise after output and cleaning + cell_exception = e + failed = True + elapsed = timeit.default_timer() - start_time - logger.info(f"Notebook {source_nb_path} took {elapsed} seconds to finish.") + if failed: + logger.error(f"Notebook {source_nb_path} failed after {elapsed} seconds") + else: + logger.info(f"Notebook {source_nb_path} took {elapsed} seconds to finish.") + + # Save output nb_exec_path = nb_path.with_name(nb_path.stem + "_executed" + nb_path.suffix) with open(nb_exec_path, 'w', encoding='utf-8') as exec_nb_file: nbformat.write(nb, exec_nb_file) @@ -249,6 +272,10 @@ def run_tutorial(tutorial): # Remove file logger logger.removeHandler(file_handler) + # Re-raise if failed + if failed: + raise cell_exception + # Loop through each tutorial summary = {} for tutorial in tutorials: @@ -277,7 +304,11 @@ def run_tutorial(tutorial): if succeeded: logger.info(colorama.Fore.GREEN + "SUCCEEDED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") else: - logger.info(colorama.Fore.RED + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") + color = 
colorama.Fore.RED + if "test_must_fail" in tutorial: + # Failed succesfully! + color = colorama.Fore.GREEN + logger.info(color + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") # Overall summary log logger.info(f"cosipy version: {cosipy.__version__}") @@ -290,7 +321,11 @@ def run_tutorial(tutorial): if succeeded: logger.info(colorama.Fore.GREEN + "SUCCEEDED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") else: - logger.info(colorama.Fore.RED + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") + color = colorama.Fore.RED + if "test_must_fail" in tutorial: + # Failed succesfully! + color = colorama.Fore.GREEN + logger.info(color + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") if __name__ == "__main__": diff --git a/docs/tutorials/run_tutorials.yml b/docs/tutorials/run_tutorials.yml index fb602e5f..d9a47fe3 100644 --- a/docs/tutorials/run_tutorials.yml +++ b/docs/tutorials/run_tutorials.yml @@ -31,6 +31,8 @@ tutorials: unzip: True # Optional. 
False by default #unzip_output: # Optional, if the unzipped file name is different from just removing the .zip or .gz + test_must_fail_magic: + notebook: test/test_must_fail_magic.ipynb dataIO: notebook: DataIO/DataIO_example.ipynb @@ -42,7 +44,7 @@ tutorials: checksum: 408edb7dc2e3dce44c0f275e4ba56fd8 spacecraft_file: - notebook: response/SpacecraftFile.ipynb + notebook: response/SpacecraftHistory.ipynb wasabi_files: COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori: checksum: 416fcc296fc37a056a069378a2d30cb2 diff --git a/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb b/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb index e944a89e..600ec451 100644 --- a/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb +++ b/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "7a73f543", "metadata": { "tags": [] }, @@ -11,6 +12,7 @@ }, { "cell_type": "markdown", + "id": "fab57d72", "metadata": {}, "source": [ "**To run this, you need the following files, which can be downloaded using the first few cells of this notebook:**\n", @@ -23,6 +25,7 @@ }, { "cell_type": "markdown", + "id": "2c66d0f5", "metadata": {}, "source": [ "This notebook fits the spectrum of a GRB simulated using MEGAlib and combined with background.\n", @@ -63,6 +66,7 @@ { "cell_type": "code", "execution_count": 1, + "id": "8191a28d", "metadata": {}, "outputs": [ { @@ -320,7 +324,7 @@ ], "source": [ "from cosipy import COSILike, BinnedData\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.response.FullDetectorResponse import FullDetectorResponse\n", "from cosipy.util import fetch_wasabi_file\n", "\n", @@ -346,6 +350,7 @@ }, { "cell_type": "markdown", + "id": "d038f5b4", "metadata": {}, "source": [ "## Download and read in binned data" @@ -353,6 +358,7 @@ }, { "cell_type": 
"markdown", + "id": "1866b59d", "metadata": {}, "source": [ "Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into" @@ -361,6 +367,7 @@ { "cell_type": "code", "execution_count": 4, + "id": "1ef5351b", "metadata": {}, "outputs": [], "source": [ @@ -369,6 +376,7 @@ }, { "cell_type": "markdown", + "id": "4a079a00", "metadata": {}, "source": [ "Download the orientation file (684.38 MB)" @@ -377,6 +385,7 @@ { "cell_type": "code", "execution_count": 6, + "id": "ae25fc81", "metadata": {}, "outputs": [], "source": [ @@ -385,6 +394,7 @@ }, { "cell_type": "markdown", + "id": "bc767d3a", "metadata": {}, "source": [ "Download the binned GRB+background data (75.73 KB)" @@ -393,6 +403,7 @@ { "cell_type": "code", "execution_count": 8, + "id": "e04945d5", "metadata": {}, "outputs": [], "source": [ @@ -401,6 +412,7 @@ }, { "cell_type": "markdown", + "id": "89220d0d", "metadata": {}, "source": [ "Download the binned GRB data (76.90 KB)" @@ -409,6 +421,7 @@ { "cell_type": "code", "execution_count": 19, + "id": "007202d6", "metadata": {}, "outputs": [], "source": [ @@ -417,6 +430,7 @@ }, { "cell_type": "markdown", + "id": "49a1d80c", "metadata": {}, "source": [ "Download the binned background data (255.97 MB)" @@ -425,6 +439,7 @@ { "cell_type": "code", "execution_count": 20, + "id": "cddcc497", "metadata": {}, "outputs": [], "source": [ @@ -433,6 +448,7 @@ }, { "cell_type": "markdown", + "id": "52e3093f", "metadata": {}, "source": [ "Download the response file (596.06 MB)" @@ -441,6 +457,7 @@ { "cell_type": "code", "execution_count": 17, + "id": "fc650dad", "metadata": {}, "outputs": [ { @@ -457,6 +474,7 @@ }, { "cell_type": "markdown", + "id": "21963836", "metadata": {}, "source": [ "Read in the spacecraft orientation file & select the beginning and end times of the GRB" @@ -465,10 +483,11 @@ { "cell_type": "code", "execution_count": 5, + "id": 
"b99c6254", "metadata": {}, "outputs": [], "source": [ - "ori = SpacecraftFile.parse_from_file(data_path / \"20280301_3_month_with_orbital_info.ori\")\n", + "ori = SpacecraftHistory.parse_from_file(data_path / \"20280301_3_month_with_orbital_info.ori\")\n", "tmin = Time(1842597410.0,format = 'unix')\n", "tmax = Time(1842597450.0,format = 'unix')\n", "sc_orientation = ori.source_interval(tmin, tmax)" @@ -476,6 +495,7 @@ }, { "cell_type": "markdown", + "id": "50d50196", "metadata": {}, "source": [ "Create BinnedData objects for the GRB only, GRB+background, and background only. The GRB only simulation is not used for the spectral fit, but can be used to compare the fitted spectrum to the source simulation" @@ -484,6 +504,7 @@ { "cell_type": "code", "execution_count": 6, + "id": "40a6f617", "metadata": {}, "outputs": [], "source": [ @@ -494,6 +515,7 @@ }, { "cell_type": "markdown", + "id": "f09bbd63", "metadata": {}, "source": [ "Load binned .hdf5 files" @@ -502,6 +524,7 @@ { "cell_type": "code", "execution_count": 7, + "id": "568caf77", "metadata": {}, "outputs": [], "source": [ @@ -512,6 +535,7 @@ }, { "cell_type": "markdown", + "id": "9a7560cc", "metadata": {}, "source": [ "Define the path to the detector response" @@ -520,6 +544,7 @@ { "cell_type": "code", "execution_count": 8, + "id": "7b977e5a", "metadata": {}, "outputs": [], "source": [ @@ -528,6 +553,7 @@ }, { "cell_type": "markdown", + "id": "6022d9fc", "metadata": { "tags": [] }, @@ -537,6 +563,7 @@ }, { "cell_type": "markdown", + "id": "e575eae4", "metadata": {}, "source": [ "Define time window of binned background simulation to use for background model" @@ -545,6 +572,7 @@ { "cell_type": "code", "execution_count": 9, + "id": "e9eadde7", "metadata": {}, "outputs": [], "source": [ @@ -556,6 +584,7 @@ }, { "cell_type": "markdown", + "id": "1f7de112", "metadata": {}, "source": [ "Set background parameter, which is used to fit the amplitude of the background, and instantiate the COSI 3ML plugin" @@ -564,6 
+593,7 @@ { "cell_type": "code", "execution_count": 10, + "id": "2d3dc68a", "metadata": {}, "outputs": [], "source": [ @@ -584,6 +614,7 @@ }, { "cell_type": "markdown", + "id": "c6fd49c0", "metadata": {}, "source": [ "Define a point source at the known location with a Band function spectrum and add it to the model" @@ -592,6 +623,7 @@ { "cell_type": "code", "execution_count": 11, + "id": "66ea955d", "metadata": {}, "outputs": [], "source": [ @@ -636,6 +668,7 @@ }, { "cell_type": "markdown", + "id": "687789e8", "metadata": {}, "source": [ "Gather all plugins and combine with the model in a JointLikelihood object, then perform maximum likelihood fit" @@ -644,6 +677,7 @@ { "cell_type": "code", "execution_count": 12, + "id": "2d807108", "metadata": { "scrolled": true, "tags": [] @@ -973,6 +1007,7 @@ }, { "cell_type": "markdown", + "id": "45547c14", "metadata": {}, "source": [ "## Error propagation and plotting" @@ -980,6 +1015,7 @@ }, { "cell_type": "markdown", + "id": "49006e73", "metadata": {}, "source": [ "Define Band function spectrum injected into MEGAlib" @@ -988,6 +1024,7 @@ { "cell_type": "code", "execution_count": 13, + "id": "5c71cf92", "metadata": {}, "outputs": [], "source": [ @@ -1015,6 +1052,7 @@ }, { "cell_type": "markdown", + "id": "3993ae3f", "metadata": {}, "source": [ "The summary of the results above tell you the optimal values of the parameters, as well as the errors. 
Propogate the errors to the \"evaluate_at\" method of the spectrum" @@ -1023,6 +1061,7 @@ { "cell_type": "code", "execution_count": 14, + "id": "131c801c", "metadata": { "scrolled": true, "tags": [] @@ -1362,6 +1401,7 @@ }, { "cell_type": "markdown", + "id": "49b76b6d", "metadata": {}, "source": [ "Evaluate the flux and errors at a range of energies for the fitted and injected spectra, and the simulated source flux" @@ -1370,6 +1410,7 @@ { "cell_type": "code", "execution_count": 15, + "id": "333c8355", "metadata": {}, "outputs": [], "source": [ @@ -1399,6 +1440,7 @@ }, { "cell_type": "markdown", + "id": "0ad4e940", "metadata": {}, "source": [ "Plot the fitted and injected spectra" @@ -1407,6 +1449,7 @@ { "cell_type": "code", "execution_count": 16, + "id": "576cde7b", "metadata": { "tags": [] }, @@ -1450,6 +1493,7 @@ }, { "cell_type": "markdown", + "id": "f49a996c", "metadata": {}, "source": [ "Plot the fitted spectrum convolved with the response, as well as the simulated source counts" @@ -1458,6 +1502,7 @@ { "cell_type": "code", "execution_count": 17, + "id": "ced626e7", "metadata": {}, "outputs": [ { @@ -1523,6 +1568,7 @@ }, { "cell_type": "markdown", + "id": "4c8204c7", "metadata": {}, "source": [ "Plot the fitted spectrum convolved with the response plus the fitted background, as well as the simulated source+background counts" @@ -1531,6 +1577,7 @@ { "cell_type": "code", "execution_count": 18, + "id": "76849551", "metadata": {}, "outputs": [ { diff --git a/docs/tutorials/test/test_must_fail_magic.ipynb b/docs/tutorials/test/test_must_fail_magic.ipynb new file mode 100644 index 00000000..2f4277f7 --- /dev/null +++ b/docs/tutorials/test/test_must_fail_magic.ipynb @@ -0,0 +1,73 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "b82642d2-a68f-41d2-8a63-3fee53880c71", + "metadata": {}, + "source": [ + "# Test magic removal" + ] + }, + { + "cell_type": "markdown", + "id": "b42b3af6-b4e6-4809-bbcb-8917202d5c44", + "metadata": {}, + "source": [ + "Magic cells 
are run in a subprocess, which catches exceptions and makes it look like the notebook succeeded " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e9ae042-38d3-4f51-9fd9-0f630bdc1e48", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "# This should fail since \"five\" has not been defined.\n", + "5*five" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eb67f11c-b8cc-42ef-ac81-ac01fd6a88da", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "# It shouldn't make it to this cell\n", + "5*5" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ac335794-7781-410e-9b60-d0dfe2ef6ad3", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python [conda env:cosipy]", + "language": "python", + "name": "conda-env-cosipy-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/setup.py b/setup.py index df963dcc..00b0cd34 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ author_email='imc@umd.edu', url='https://github.com/cositools/cosipy', packages = find_packages(include=["cosipy", "cosipy.*"]), - install_requires = ['histpy>=2.0.3', + install_requires = ['histpy>=2.0.5', 'h5py', 'hdf5plugin', 'mhealpy', diff --git a/tests/image_deconvolution/test_coordsys_conversion_matrix.py b/tests/image_deconvolution/test_coordsys_conversion_matrix.py index bfa3312b..7b9e6d3e 100644 --- a/tests/image_deconvolution/test_coordsys_conversion_matrix.py +++ b/tests/image_deconvolution/test_coordsys_conversion_matrix.py @@ -3,7 +3,7 @@ from cosipy import test_data from cosipy.response import FullDetectorResponse -from cosipy.spacecraftfile import SpacecraftFile +from 
cosipy.spacecraftfile import SpacecraftHistory from cosipy.image_deconvolution import SpacecraftAttitudeExposureTable from cosipy.image_deconvolution import CoordsysConversionMatrix @@ -12,7 +12,7 @@ # # full_detector_response = FullDetectorResponse.open(test_data.path / "test_full_detector_response.h5") # -# ori = SpacecraftFile.parse_from_file(test_data.path / "20280301_first_10sec.ori") +# ori = SpacecraftHistory.open(test_data.path / "20280301_first_10sec.ori") # # ccm = CoordsysConversionMatrix.time_binning_ccm(full_detector_response, ori, [ori.get_time()[0].value, ori.get_time()[-1].value] * u.s) # diff --git a/tests/image_deconvolution/test_exposure_table.py b/tests/image_deconvolution/test_exposure_table.py index 373acad9..a7577769 100644 --- a/tests/image_deconvolution/test_exposure_table.py +++ b/tests/image_deconvolution/test_exposure_table.py @@ -5,30 +5,30 @@ from cosipy import test_data from cosipy.image_deconvolution import SpacecraftAttitudeExposureTable, TimeBinnedExposureTable -from cosipy.spacecraftfile import SpacecraftFile from cosipy import response from cosipy import BinnedData +from cosipy.spacecraftfile import SpacecraftHistory def test_scatt_exposure_table(tmp_path): nside = 1 - ori = SpacecraftFile.parse_from_file(test_data.path / "20280301_first_10sec.ori") + ori = SpacecraftHistory.open(test_data.path / "20280301_first_10sec.ori") - assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=None, stop=ori.get_time()[-1], min_livetime=0, min_num_pointings=1) == None + assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=None, stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) == None - assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=ori.get_time()[0], stop=None, min_livetime=0, min_num_pointings=1) == None + assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=ori.obstime[0], stop=None, min_livetime=0, 
min_num_pointings=1) == None exposure_table = SpacecraftAttitudeExposureTable.from_orientation(ori, nside=nside, - start=ori.get_time()[0], stop=ori.get_time()[-1], + start=ori.obstime[0], stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) exposure_table_nest = SpacecraftAttitudeExposureTable.from_orientation(ori, nside=nside, scheme = 'nested', - start=ori.get_time()[0], stop=ori.get_time()[-1], + start=ori.obstime[0], stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) exposure_table_badscheme = SpacecraftAttitudeExposureTable.from_orientation(ori, nside=nside, scheme = None, - start=ori.get_time()[0], stop=ori.get_time()[-1], + start=ori.obstime[0], stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) exposure_table.save_as_fits(tmp_path / "exposure_table_test_nside1_ring.fits") @@ -39,13 +39,13 @@ def test_scatt_exposure_table(tmp_path): assert np.all(map_pointing_zx.contents == Histogram.open(test_data.path / "image_deconvolution/map_pointing_zx_test_nside1_ring.hdf5").contents) - # test_generating_histogram + # test_generating_histogram full_detector_response = response.FullDetectorResponse.open(test_data.path / "test_full_detector_response.h5") - + analysis = BinnedData(test_data.path / "inputs_crab.yaml") - + analysis.cosi_dataset = analysis.get_dict_from_hdf5(test_data.path / "unbinned_data_MEGAlib_calc.hdf5") - + # modify the following parameters for unit test analysis.energy_bins = full_detector_response.axes['Em'].edges.to(u.keV).value analysis.nside = full_detector_response.axes['PsiChi'].nside @@ -55,10 +55,10 @@ def test_scatt_exposure_table(tmp_path): # NOTE: test_data.path / "unbinned_data_MEGAlib_calc.hdf5" is written in a old format!!! 
_ = analysis.cosi_dataset.pop('Xpointings') analysis.cosi_dataset['Xpointings (glon,glat)'] = _ - + _ = analysis.cosi_dataset.pop('Ypointings') analysis.cosi_dataset['Ypointings (glon,glat)'] = _ - + _ = analysis.cosi_dataset.pop('Zpointings') analysis.cosi_dataset['Zpointings (glon,glat)'] = _ @@ -69,39 +69,39 @@ def test_scatt_exposure_table(tmp_path): assert np.all(binned_signal.contents == binned_signal_ref.contents) def test_time_binned_exposure_table(tmp_path): - ori = SpacecraftFile.parse_from_file(test_data.path / "20280301_first_10sec.ori") + ori = SpacecraftHistory.open(test_data.path / "20280301_first_10sec.ori") tstart_list = Time([1835478000.0], scale='utc', format='unix') tstop_list = Time([1835478005.0], scale='utc', format='unix') - + exposure_table = TimeBinnedExposureTable.from_orientation(ori, tstart_list = tstart_list, tstop_list = tstop_list) exposure_table.save_as_fits(tmp_path / "exposure_table_test_time_binning.fits") - + assert exposure_table == TimeBinnedExposureTable.from_fits(tmp_path / "exposure_table_test_time_binning.fits") full_detector_response = response.FullDetectorResponse.open(test_data.path / "test_full_detector_response.h5") - + analysis = BinnedData(test_data.path / "inputs_crab.yaml") - + analysis.cosi_dataset = analysis.get_dict_from_hdf5(test_data.path / "unbinned_data_MEGAlib_calc.hdf5") - + # modify the following parameters for unit test analysis.energy_bins = full_detector_response.axes['Em'].edges.to(u.keV).value analysis.nside = full_detector_response.axes['PsiChi'].nside analysis.phi_pix_size = full_detector_response.axes['Phi'].widths[0].to(u.deg).value analysis.time_bins = 10 #s - + # NOTE: test_data.path / "unbinned_data_MEGAlib_calc.hdf5" is written in a old format!!! 
_ = analysis.cosi_dataset.pop('Xpointings') analysis.cosi_dataset['Xpointings (glon,glat)'] = _ - + _ = analysis.cosi_dataset.pop('Ypointings') analysis.cosi_dataset['Ypointings (glon,glat)'] = _ - + _ = analysis.cosi_dataset.pop('Zpointings') analysis.cosi_dataset['Zpointings (glon,glat)'] = _ - + binned_signal = exposure_table.get_binned_data(analysis, psichi_binning = 'local', sparse = False) binned_signal_ref = Histogram.open(test_data.path / "image_deconvolution" / 'test_event_histogram_localCDS_time.h5') diff --git a/tests/interfaces/__init__.py b/tests/interfaces/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/interfaces/test_background_interface.py b/tests/interfaces/test_background_interface.py new file mode 100644 index 00000000..e59e049b --- /dev/null +++ b/tests/interfaces/test_background_interface.py @@ -0,0 +1,5 @@ +from cosipy.interfaces import (BackgroundInterface, + BinnedBackgroundInterface, + BackgroundDensityInterface + ) + diff --git a/tests/polarization/test_polarization_asad.py b/tests/polarization/test_polarization_asad.py index 22474fc5..b5527b0f 100644 --- a/tests/polarization/test_polarization_asad.py +++ b/tests/polarization/test_polarization_asad.py @@ -3,9 +3,9 @@ from astropy import units as u from scoords import SpacecraftFrame -from cosipy.polarization import PolarizationASAD +from cosipy.polarization_fitting import PolarizationASAD from cosipy.polarization.conventions import IAUPolarizationConvention, MEGAlibRelativeZ -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy import BinnedData from cosipy.threeml.custom_functions import Band_Eflux from cosipy import test_data @@ -16,8 +16,8 @@ binned_data = analysis.binned_data response_path = test_data.path / 'test_polarization_response.h5' -sc_orientation = SpacecraftFile.parse_from_file(test_data.path / 'polarization_ori.ori') -attitude = sc_orientation.get_attitude()[0] +sc_orientation = 
SpacecraftHistory.open(test_data.path / 'polarization_ori.ori') +attitude = sc_orientation.attitude[0] a = 10. * u.keV b = 10000. * u.keV diff --git a/tests/response/test_full_detector_response.py b/tests/response/test_full_detector_response.py index c6014faf..d1d64893 100644 --- a/tests/response/test_full_detector_response.py +++ b/tests/response/test_full_detector_response.py @@ -9,7 +9,7 @@ from cosipy import test_data from cosipy.response import FullDetectorResponse -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_first_10sec.ori" @@ -110,7 +110,7 @@ def test_get_point_source_response(): scatt_map=scatt_map) def test_get_extended_source_response(): - orientation = SpacecraftFile.parse_from_file(orientation_path) + orientation = SpacecraftHistory.open(orientation_path) with FullDetectorResponse.open(response_path) as response: @@ -129,7 +129,7 @@ def test_get_extended_source_response(): def test_merge_psr_to_extended_source_response(tmp_path): - orientation = SpacecraftFile.parse_from_file(orientation_path) + orientation = SpacecraftHistory.open(orientation_path) with FullDetectorResponse.open(response_path) as response: diff --git a/tests/source_injector/test_source_injector.py b/tests/source_injector/test_source_injector.py index d42f9b7f..c72d66ee 100644 --- a/tests/source_injector/test_source_injector.py +++ b/tests/source_injector/test_source_injector.py @@ -1,4 +1,4 @@ -from cosipy import SpacecraftFile, SourceInjector +from cosipy import SpacecraftHistory, SourceInjector from astropy.coordinates import SkyCoord from threeML import Powerlaw from pathlib import Path @@ -15,7 +15,7 @@ def test_inject_point_source(): # defind the response and orientation response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = 
SpacecraftFile.parse_from_file(orientation_path) + ori = SpacecraftHistory.open(orientation_path) # powerlaw model index = -2.2 diff --git a/tests/spacecraftfile/test_arf_rmf_converter.py b/tests/spacecraftfile/test_arf_rmf_converter.py new file mode 100644 index 00000000..72c5b73c --- /dev/null +++ b/tests/spacecraftfile/test_arf_rmf_converter.py @@ -0,0 +1,272 @@ +import os +from pathlib import Path + +import numpy as np +from astropy.coordinates import SkyCoord +from astropy.io import fits +from cosipy import test_data, SpacecraftHistory +from cosipy.response import FullDetectorResponse +from cosipy.response import RspArfRmfConverter + +from astropy import units as u + +def test_get_psr_rsp(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + Ei_edges, Ei_lo, Ei_hi, Em_edges, Em_lo, Em_hi, areas, matrix = converter.get_psr_rsp() + + assert np.allclose(Ei_edges, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(Ei_lo, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(Ei_hi, + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(Em_edges, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(Em_lo, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(Em_hi, + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(areas, + np.array([0.06089862, 0.4563752, 1.1601573, 1.6237522, 2.0216975, + 2.2039971, 2.0773466, 1.7005537, 1.1626455, 0.80194914])) + 
+ assert np.allclose(matrix, + np.array([[9.80996430e-01, 4.68325317e-02, 1.82471890e-02, 9.86817386e-03, + 5.82037494e-03, 3.47572053e-03, 2.80415593e-03, 3.13903880e-03, + 4.89909900e-03, 6.68705115e-03], + [1.90035217e-02, 9.44634676e-01, 1.28470331e-01, 9.38407257e-02, + 4.32382338e-02, 2.23877952e-02, 1.63043533e-02, 1.73287615e-02, + 2.80312393e-02, 3.78256924e-02], + [0.00000000e+00, 8.53277557e-03, 8.48568857e-01, 2.18858123e-01, + 1.85861006e-01, 7.39495233e-02, 4.45922092e-02, 4.06639054e-02, + 6.96888119e-02, 9.27841067e-02], + [0.00000000e+00, 0.00000000e+00, 4.71363496e-03, 6.62667990e-01, + 6.19757064e-02, 2.71992888e-02, 1.51670892e-02, 1.46367634e-02, + 3.69769707e-02, 7.03022778e-02], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.47649962e-02, + 7.00923026e-01, 2.60504693e-01, 9.65307504e-02, 7.03864172e-02, + 1.15635686e-01, 1.53913230e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 2.18164618e-03, 6.11085474e-01, 2.28024259e-01, 9.29291621e-02, + 1.14003479e-01, 1.54005408e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 1.39757351e-03, 5.95472097e-01, 2.54652113e-01, + 1.32362068e-01, 1.71157718e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00, 1.10507896e-03, 5.05610526e-01, + 2.00507417e-01, 1.41500503e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 6.53312833e-04, + 2.97714621e-01, 1.26633704e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 1.80651987e-04, 4.51902114e-02]])) + + +def test_get_arf(): + + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + target_coord = 
SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + + converter.get_arf(out_name="test") + + fits_file = fits.open("test.arf") + + assert np.allclose(fits_file[1].data.field("ENERG_LO"), + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(fits_file[1].data.field("ENERG_HI"), + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(fits_file[1].data.field("SPECRESP"), + np.array([0.06089862, 0.4563752, 1.1601573, 1.6237522, 2.0216975, + 2.2039971, 2.0773466, 1.7005537, 1.1626455, 0.80194914])) + + os.remove("test.arf") + + +def test_get_rmf(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + + converter.get_rmf(out_name="test") + + fits_file = fits.open("test.rmf") + + assert np.allclose(fits_file[1].data.field("ENERG_LO"), + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(fits_file[1].data.field("ENERG_HI"), + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(fits_file[1].data.field("N_GRP"), + np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1])) + + matrix_flattened = [] + for i in fits_file[1].data.field("MATRIX"): + matrix_flattened += i.tolist() + + assert np.allclose(matrix_flattened, + [0.9809964299201965, + 0.019003521651029587, + 0.046832531690597534, + 0.9446346759796143, + 0.008532775565981865, + 0.01824718900024891, + 0.12847033143043518, + 0.848568856716156, + 0.0047136349603533745, + 0.009868173860013485, + 
0.09384072571992874, + 0.21885812282562256, + 0.662667989730835, + 0.014764996245503426, + 0.005820374935865402, + 0.043238233774900436, + 0.1858610063791275, + 0.06197570636868477, + 0.7009230256080627, + 0.00218164618127048, + 0.003475720528513193, + 0.02238779515028, + 0.07394952327013016, + 0.027199288830161095, + 0.26050469279289246, + 0.6110854744911194, + 0.0013975735055282712, + 0.0028041559271514416, + 0.01630435325205326, + 0.04459220916032791, + 0.01516708917915821, + 0.09653075039386749, + 0.22802425920963287, + 0.5954720973968506, + 0.001105078961700201, + 0.0031390388030558825, + 0.017328761518001556, + 0.04066390544176102, + 0.014636763371527195, + 0.07038641721010208, + 0.0929291620850563, + 0.25465211272239685, + 0.5056105256080627, + 0.000653312832582742, + 0.004899099003523588, + 0.0280312392860651, + 0.0696888118982315, + 0.03697697073221207, + 0.11563568562269211, + 0.11400347948074341, + 0.13236206769943237, + 0.20050741732120514, + 0.29771462082862854, + 0.0001806519867386669, + 0.006687051150947809, + 0.03782569244503975, + 0.0927841067314148, + 0.07030227780342102, + 0.1539132297039032, + 0.15400540828704834, + 0.17115771770477295, + 0.14150050282478333, + 0.12663370370864868, + 0.04519021138548851]) + + os.remove("test.rmf") + + +def test_get_pha(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + converter.get_arf(out_name="test") + converter.get_rmf(out_name="test") + + counts = np.array([0.01094232, 0.04728866, 0.06744612, 0.01393708, 0.05420688, + 0.03141498, 0.01818584, 0.00717219, 0.00189568, 0.00010503]) * 1000 + + errors = np.sqrt(counts) + + converter.get_pha(src_counts=counts, 
errors=errors, exposure_time=10) + + os.remove("test.arf") + os.remove("test.rmf") + + fits_file = fits.open("test.pha") + os.remove("test.pha") + + assert np.allclose(fits_file[1].data.field("CHANNEL"), + np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])) + + assert np.allclose(fits_file[1].data.field("COUNTS"), + np.array([10, 47, 67, 13, 54, 31, 18, 7, 1, 0])) + + assert np.allclose(fits_file[1].data.field("STAT_ERR"), + np.array([3, 6, 8, 3, 7, 5, 4, 2, 1, 0])) + + +def test_plot_arf(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + converter.get_arf(out_name="test") + + converter.plot_arf() + + assert Path("Effective_area_for_test.png").exists() + + os.remove("test.arf") + os.remove("Effective_area_for_test.png") + + +def test_plot_rmf(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + converter.get_rmf(out_name="test") + + converter.plot_rmf() + + assert Path("Redistribution_matrix_for_test.png").exists() + + os.remove("test.rmf") + os.remove("Redistribution_matrix_for_test.png") diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index 966d098f..b22120b9 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -1,441 +1,213 @@ +from cosipy.response import 
FullDetectorResponse from cosipy import test_data -from pytest import approx -from cosipy import SpacecraftFile +from cosipy import SpacecraftHistory import numpy as np import astropy.units as u from astropy.coordinates import SkyCoord from astropy.io import fits import os from pathlib import Path -from astropy.time import Time +from astropy.time import Time -from cosipy.response import FullDetectorResponse +def test_get_time(): -energy_edges = 10**np.linspace(2, 4, 10 + 1) # ten bins from 100 to 10000 KeV + ori_path = test_data.path / "20280301_first_10sec.ori" + + ori = SpacecraftHistory.open(ori_path) + + assert np.allclose(ori.obstime.unix, + [1835478000.0, 1835478001.0, 1835478002.0, + 1835478003.0, 1835478004.0, 1835478005.0, + 1835478006.0, 1835478007.0, 1835478008.0, + 1835478009.0, 1835478010.0]) -def test_get_time(): + +def test_read_only_selected_range(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftHistory.open(ori_path, + tstart=Time(1835478002.0, format = 'unix'), + tstop = Time(1835478008.0, format='unix') + ) + + assert np.allclose(ori.obstime.unix, + [1835478002.0, + 1835478003.0, 1835478004.0, 1835478005.0, + 1835478006.0, 1835478007.0, 1835478008.0, 1835478009.0]) - start = 1835478000.0 - assert np.allclose(ori.get_time().value, - np.linspace(start, start + 10, 11)) + ori = SpacecraftHistory.open(ori_path, + tstart=Time(1835478002.5, format = 'unix'), + tstop = Time(1835478007.5, format='unix') + ) + assert np.allclose(ori.obstime.unix, + [1835478002.0, + 1835478003.0, 1835478004.0, 1835478005.0, + 1835478006.0, 1835478007.0, 1835478008.0]) def test_get_time_delta(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - time_delta = ori.get_time_delta() - time_delta.format = "sec" + ori = SpacecraftHistory.open(ori_path) + time_delta = ori.intervals_duration.to_value(u.s) - assert np.allclose(time_delta.value, 
np.ones(10)) + assert np.allclose(time_delta, np.array([1.000000, 1.000000, 1.000000, 1.000000, 1.000000, + 1.000000, 1.000000, 1.000000, 1.000000, 1.000000])) -def test_altitude(): - - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - altitude = ori.get_altitude() + time_delta = ori.livetime.to_value(u.s) - assert np.allclose(altitude, np.zeros(11)) + assert np.allclose(time_delta, np.array([1.000000, 1.000000, 1.000000, 1.000000, 1.000000, + 1.000000, 1.000000, 1.000000, 1.000000, 1.000000])) def test_get_attitude(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - attitude = ori.get_attitude() + ori = SpacecraftHistory.open(ori_path) + + attitude = ori.attitude matrix = np.array([[[0.215904, -0.667290, -0.712818], [0.193436, 0.744798, -0.638638], [0.957062, 0.000000, 0.289883]], - + [[0.216493, -0.667602, -0.712347], [0.194127, 0.744518, -0.638754], [0.956789, 0.000000, 0.290783]], - + [[0.217081, -0.667914, -0.711875], [0.194819, 0.744238, -0.638870], [0.956515, -0.000000, 0.291683]], - + [[0.217669, -0.668227, -0.711402], [0.195511, 0.743958, -0.638985], [0.956240, 0.000000, 0.292582]], - + [[0.218255, -0.668539, -0.710929], [0.196204, 0.743677, -0.639100], [0.955965, 0.000000, 0.293481]], - + [[0.218841, -0.668852, -0.710455], [0.196897, 0.743396, -0.639214], [0.955688, -0.000000, 0.294380]], - + [[0.219426, -0.669165, -0.709980], [0.197590, 0.743114, -0.639327], [0.955411, 0.000000, 0.295279]], - + [[0.220010, -0.669477, -0.709504], [0.198284, 0.742833, -0.639440], [0.955133, -0.000000, 0.296177]], - + [[0.220594, -0.669790, -0.709027], [0.198978, 0.742551, -0.639552], [0.954854, 0.000000, 0.297075]], - + [[0.221176, -0.670103, -0.708550], [0.199673, 0.742268, -0.639663], [0.954574, -0.000000, 0.297973]], - + [[0.221758, -0.670416, -0.708072], [0.200368, 0.741986, -0.639773], [0.954294, -0.000000, 0.298871]]]) - + assert 
np.allclose(attitude.as_matrix(), matrix) -def test_get_target_in_sc_frame(): - - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_coord) - - assert np.allclose(path_in_sc.lon.deg, - np.array([118.393522, 118.425255, 118.456868, 118.488362, 118.519735, - 118.550989, 118.582124, 118.613139, 118.644035, 118.674813, 118.705471])) - - assert np.allclose(path_in_sc.lat.deg, - np.array([46.733430, 46.687559, 46.641664, 46.595745, 46.549801, 46.503833, - 46.457841, 46.411825, 46.365785, 46.319722, 46.273634])) - - -def test_get_dwell_map(): - - response_path =test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - path_in_sc = ori.get_target_in_sc_frame(target_coord) - - response = FullDetectorResponse.open(response_path) - - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc) - - assert np.allclose(dwell_map[:].value, - np.array([1.895057, 7.615584, 0.244679, 0.244679, 0.000000, 0.000000, - 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000])) - - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc, - interp = False) - - response.close() - - assert np.allclose(dwell_map[:].value, - np.array([ 0., 10., 0., 0., 0., 0., - 0., 0., 0., 0., 0., 0.])) - - -def test_get_scatt_map(): - - response_path =test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - # test without 
earth occultation, as Crab is entirely occluded; - # TODO: use a better .ori file for testing - scatt_map = ori.get_scatt_map(nside=16, earth_occ=False) - ax_map = scatt_map.get_axes_map(nside=16) - -def test_get_psr_rsp(): - - response_path = test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - path_in_sc = ori.get_target_in_sc_frame(target_coord) - - response = FullDetectorResponse.open(response_path) - - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc) - - Ei_edges, Ei_lo, Ei_hi, Em_edges, Em_lo, Em_hi, areas, matrix = ori.get_psr_rsp(response_path, dwell_map) - - response.close() - - assert np.allclose(Ei_edges, energy_edges) - - assert np.allclose(Ei_lo, energy_edges[:-1]) - - assert np.allclose(Ei_hi, energy_edges[1:]) - - assert np.allclose(Em_edges, energy_edges) - - assert np.allclose(Em_lo, energy_edges[:-1]) - - assert np.allclose(Em_hi, energy_edges[1:]) - - assert np.allclose(areas, - np.array([ 9.07843857, 35.97189941, 56.56903076, 58.62650146, 53.77538452, - 46.66890564, 37.5471283, 25.56105347, 18.39017029, 10.23398438])) - - assert np.allclose(matrix, - np.array([[9.82146084e-01, 6.52569011e-02, 3.30404416e-02, 1.34480894e-02, - 8.81888345e-03, 7.15653040e-03, 6.46192394e-03, 6.94540003e-03, - 7.08964514e-03, 9.14793275e-03], - [1.78539176e-02, 9.27872598e-01, 1.37546435e-01, 8.62949491e-02, - 5.51867969e-02, 4.31010798e-02, 3.65878679e-02, 3.69836800e-02, - 3.58317234e-02, 4.46425714e-02], - [0.00000000e+00, 6.87047699e-03, 8.26300919e-01, 1.80046827e-01, - 9.57962275e-02, 7.33733699e-02, 6.65754601e-02, 7.09649101e-02, - 6.98765442e-02, 8.52129683e-02], - [0.00000000e+00, 0.00000000e+00, 3.11220298e-03, 7.18503475e-01, - 1.78951785e-01, 7.96607733e-02, 6.17865399e-02, 6.78083599e-02, - 7.75652826e-02, 
1.12138554e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.70663046e-03, - 6.60251915e-01, 1.66121393e-01, 6.80495277e-02, 5.26736267e-02, - 4.41736877e-02, 4.98283207e-02], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 9.94389760e-04, 6.30014181e-01, 1.64825916e-01, 6.65939748e-02, - 4.36101966e-02, 4.12763469e-02], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 5.72687772e-04, 5.95490038e-01, 2.90101558e-01, - 1.56857163e-01, 9.14273262e-02], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 0.00000000e+00, 2.22623014e-04, 4.07899320e-01, - 4.00614947e-01, 2.29005918e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 2.92088534e-05, - 1.64380059e-01, 3.01594704e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 7.36859079e-07, 3.57253887e-02]])) - - -def test_get_arf(): - - response_path = test_data.path / "test_full_detector_response.h5" +def test_interp_attitude(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_coord) - - response = FullDetectorResponse.open(response_path) - - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc) - - _ = ori.get_psr_rsp(response_path, dwell_map) - - ori.get_arf(out_name = "test") - - fits_file = fits.open("test.arf") + ori = SpacecraftHistory.open(ori_path) - assert np.allclose(fits_file[1].data.field("ENERG_LO"), energy_edges[:-1]) + assert np.allclose(ori.interp_attitude(Time(1835478000.5, format = 'unix')).as_quat(), [ 0.21284241, -0.55635581, 0.28699984, 0.75019825]) - assert 
np.allclose(fits_file[1].data.field("ENERG_HI"), energy_edges[1:]) + # Multiple + assert np.allclose(ori.interp_attitude(Time([1835478000.5, 1835478000.5], format='unix')).as_quat(), + [[0.21284241, -0.55635581, 0.28699984, 0.75019825],[0.21284241, -0.55635581, 0.28699984, 0.75019825]]) - assert np.allclose(fits_file[1].data.field("SPECRESP"), - np.array([ 9.07843857, 35.97189941, 56.56903076, 58.62650146, 53.77538452, - 46.66890564, 37.5471283, 25.56105347, 18.39017029, 10.23398438])) + # Test edges + assert np.allclose(ori.interp_attitude(Time(1835478000.0, format='unix')).as_quat(), ori.attitude[0].as_quat()) + assert np.allclose(ori.interp_attitude(Time(1835478001.0, format='unix')).as_quat(), ori.attitude[1].as_quat()) - response.close() - - os.remove("test.arf") - -def test_get_rmf(): - - response_path = test_data.path / "test_full_detector_response.h5" +def test_interp_location(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_coord) - - response = FullDetectorResponse.open(response_path) - - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc) + ori = SpacecraftHistory.open(ori_path) - _ = ori.get_psr_rsp(response_path, dwell_map) + assert np.allclose(ori.interp_location(Time(1835478000.5, format = 'unix')).cartesian.xyz.to_value(u.km), [ -378.74248737, -6048.59116724, -3346.84533097]) - ori.get_rmf(out_name = "test") + # Multiple + assert np.allclose(ori.interp_location(Time([1835478000.5,1835478000.5], format='unix')).cartesian.xyz.to_value(u.km), + np.transpose([[-378.74248737, -6048.59116724, -3346.84533097],[-378.74248737, -6048.59116724, -3346.84533097]])) - fits_file = fits.open("test.rmf") + # Test edges + assert np.allclose(ori.interp_location(Time(1835478000.0, 
format='unix')).cartesian.xyz.to_value(u.km), ori.location[0].cartesian.xyz.to_value(u.km)) + assert np.allclose(ori.interp_location(Time(1835478001.0, format='unix')).cartesian.xyz.to_value(u.km), ori.location[1].cartesian.xyz.to_value(u.km)) - assert np.allclose(fits_file[1].data.field("ENERG_LO"), energy_edges[:-1]) - assert np.allclose(fits_file[1].data.field("ENERG_HI"), energy_edges[1:]) - - assert np.allclose(fits_file[1].data.field("N_GRP"), np.ones(10)) - - matrix_flattened = [] - for i in fits_file[1].data.field("MATRIX"): - matrix_flattened += i.tolist() - - assert np.allclose(matrix_flattened, - np.array([0.9821460843086243, 0.01785391755402088, 0.06525690108537674, 0.9278725981712341, 0.006870476994663477, - 0.03304044157266617, 0.13754643499851227, 0.8263009190559387, 0.003112202975898981, 0.013448089361190796, - 0.08629494905471802, 0.18004682660102844, 0.718503475189209, 0.0017066304571926594, 0.008818883448839188, - 0.05518679693341255, 0.09579622745513916, 0.17895178496837616, 0.6602519154548645, 0.0009943897603079677, - 0.007156530395150185, 0.043101079761981964, 0.07337336987257004, 0.07966077327728271, 0.16612139344215393, - 0.630014181137085, 0.0005726877716369927, 0.0064619239419698715, 0.03658786788582802, 0.06657546013593674, - 0.06178653985261917, 0.06804952770471573, 0.1648259162902832, 0.595490038394928, 0.00022262301354203373, - 0.006945400033146143, 0.0369836799800396, 0.07096491008996964, 0.0678083598613739, 0.05267362669110298, - 0.06659397482872009, 0.290101557970047, 0.40789932012557983, 2.920885344792623e-05, 0.0070896451361477375, - 0.03583172336220741, 0.0698765441775322, 0.0775652825832367, 0.04417368769645691, 0.04361019656062126, - 0.15685716271400452, 0.4006149470806122, 0.1643800586462021, 7.368590786427376e-07, 0.00914793275296688, - 0.04464257135987282, 0.08521296828985214, 0.11213855445384979, 0.04982832074165344, 0.041276346892118454, - 0.09142732620239258, 0.22900591790676117, 0.30159470438957214, 
0.035725388675928116])) - - response.close() - - os.remove("test.rmf") - - -def test_get_pha(): +def test_get_target_in_sc_frame(): - response_path = test_data.path / "test_full_detector_response.h5" ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftHistory.open(ori_path) - target_name = "Crab" target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") path_in_sc = ori.get_target_in_sc_frame(target_coord) - response = FullDetectorResponse.open(response_path) - - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc) - _ = ori.get_psr_rsp(response_path, dwell_map) - ori.get_arf(out_name = "test") - ori.get_rmf(out_name = "test") - - counts = np.array([0.01094232, 0.04728866, 0.06744612, 0.01393708, 0.05420688, - 0.03141498, 0.01818584, 0.00717219, 0.00189568, 0.00010503])*1000 - - errors = np.sqrt(counts) - - ori.get_pha(src_counts=counts, errors=errors, exposure_time=10) - - response.close() - - os.remove("test.arf") - os.remove("test.rmf") - - fits_file = fits.open("test.pha") - os.remove("test.pha") - - assert np.allclose(fits_file[1].data.field("CHANNEL"), - np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])) + assert np.allclose(path_in_sc.lon.deg, + np.array([118.393522, 118.425255, 118.456868, 118.488362, 118.519735, + 118.550989, 118.582124, 118.613139, 118.644035, 118.674813, 118.705471])) - assert np.allclose(fits_file[1].data.field("COUNTS"), - np.array([10, 47, 67, 13, 54, 31, 18, 7, 1, 0])) + assert np.allclose(path_in_sc.lat.deg, + np.array([46.733430, 46.687559, 46.641664, 46.595745, 46.549801, 46.503833, + 46.457841, 46.411825, 46.365785, 46.319722, 46.273634])) - assert np.allclose(fits_file[1].data.field("STAT_ERR"), - np.array([3, 6, 8, 3, 7, 5, 4, 2, 1, 0])) -def test_plot_arf(): +def test_get_dwell_map(): - response_path = test_data.path / "test_full_detector_response.h5" ori_path = test_data.path / "20280301_first_10sec.ori" - ori = 
SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" + ori = SpacecraftHistory.open(ori_path) + target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - path_in_sc = ori.get_target_in_sc_frame(target_coord) - - response = FullDetectorResponse.open(response_path) - - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc) - _ = ori.get_psr_rsp(response_path, dwell_map) - ori.get_arf(out_name = "test") - - response.close() - - ori.plot_arf() - - assert Path("Effective_area_for_test.png").exists() - - os.remove("test.arf") - os.remove("Effective_area_for_test.png") + dwell_map = ori.get_dwell_map(target_coord, nside=1, scheme = 'ring') + + assert np.allclose(dwell_map[:].to_value(u.s), + np.array([1.895057, 7.615584, 0.244679, 0.244679, 0.000000, 0.000000, + 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000])) -def test_plot_rmf(): +def test_select_interval(): - response_path = test_data.path / "test_full_detector_response.h5" ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_coord) - - response = FullDetectorResponse.open(response_path) + ori = SpacecraftHistory.open(ori_path) - dwell_map = ori.get_dwell_map(base = response, - src_path = path_in_sc) - _ = ori.get_psr_rsp(response_path, dwell_map) - ori.get_rmf(out_name = "test") + new_ori = ori.select_interval(ori.tstart+0.1*u.s, ori.tstart+2.1*u.s) - ori.plot_rmf() + x, y, z = new_ori.attitude.as_axes() - assert Path("Redistribution_matrix_for_test.png").exists() - - response.close() - - os.remove("test.rmf") - os.remove("Redistribution_matrix_for_test.png") - - -def test_source_interval(): - - response_path = test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori 
= SpacecraftFile.parse_from_file(ori_path) + assert np.allclose(new_ori.obstime.unix, + np.array([1.835478e+09, 1.835478e+09, 1.835478e+09, 1.835478e+09])) - times = ori.get_time().to_value(format = "unix") - new_ori = ori.source_interval(Time(times[0]+0.1, format = "unix"), - Time(times[0]+2.1, format = "unix")) + assert np.allclose(np.asarray([x.transform_to('galactic').l.deg, x.transform_to('galactic').b.deg]).transpose().flatten(), + np.array([41.86062093, 73.14368765, 41.88225011, 73.09517927, + 41.90629597, 73.0412838 , 41.9087019 , 73.03589454])) - assert np.allclose(new_ori.get_time().to_value(format="unix"), - np.array([1.835478e+09, 1.835478e+09, 1.835478e+09, 1.835478e+09])) + assert np.allclose(np.asarray([z.transform_to('galactic').l.deg, z.transform_to('galactic').b.deg]).transpose().flatten(), + np.array([221.86062093, 16.85631235, 221.88225011, 16.90482073, + 221.90629597, 16.9587162 , 221.9087019 , 16.96410546])) - assert np.allclose(np.sum(new_ori.livetime), (2.1 - 0.1)) + # Edge cases + new_ori = ori.select_interval(ori.tstart, ori.tstop) + assert np.all(new_ori.obstime == ori.obstime) - assert np.allclose(new_ori.x_pointings.l.value, - np.array([41.86062429, 41.88225011, 41.90629597, 41.90870524])) - assert np.allclose(new_ori.x_pointings.b.value, - np.array([73.14368765, 73.09517927, 73.0412838, 73.03589454])) + new_ori = ori.select_interval(ori.obstime[1], ori.tstop) + assert np.all(new_ori.obstime == ori.obstime[1:]) - assert np.allclose(new_ori.z_pointings.l.value, - np.array([221.86062062, 221.88225011, 221.90629597, 221.90870159])) - assert np.allclose(new_ori.z_pointings.b.value, - np.array([16.85631235, 16.90482073, 16.9587162, 16.96410546])) + new_ori = ori.select_interval(ori.tstart, ori.obstime[-2]) + assert np.all(new_ori.obstime == ori.obstime[:-1]) - new_ori = ori.source_interval(Time(times[0]+0.1, format = "unix"), - Time(times[0]+0.8, format = "unix")) + # Fully within single interval + new_ori = ori.select_interval(ori.tstart + 
.4*u.s, ori.tstart + .6*u.s) + assert new_ori.tstart == ori.tstart + .4*u.s + assert new_ori.tstop == ori.tstart + .6*u.s + assert new_ori.nintervals == 1 + assert np.isclose(new_ori.livetime[0], 0.2*u.s) - assert np.allclose(np.sum(new_ori.livetime), (0.8 - 0.1)) diff --git a/tests/threeml/test_spectral_fitting.py b/tests/threeml/test_spectral_fitting.py index bb4cc9ee..436ab001 100644 --- a/tests/threeml/test_spectral_fitting.py +++ b/tests/threeml/test_spectral_fitting.py @@ -1,21 +1,30 @@ -from cosipy import COSILike, test_data, BinnedData -from cosipy.spacecraftfile import SpacecraftFile +import sys + +from cosipy import test_data, BinnedData +from cosipy.background_estimation import FreeNormBinnedBackground +from cosipy.data_io import EmCDSBinnedData +from cosipy.interfaces import ThreeMLPluginInterface +from cosipy.response import BinnedThreeMLModelFolding, FullDetectorResponse, BinnedInstrumentResponse, \ + BinnedThreeMLPointSourceResponse +from cosipy.spacecraftfile import SpacecraftHistory import astropy.units as u import numpy as np from threeML import Band, PointSource, Model, JointLikelihood, DataList from astromodels import Parameter from astropy.coordinates import SkyCoord +from cosipy.statistics import PoissonLikelihood + data_path = test_data.path -sc_orientation = SpacecraftFile.parse_from_file(data_path / "20280301_2s.ori") -dr = str(data_path / "test_full_detector_response.h5") # path to detector response +sc_orientation = SpacecraftHistory.open(data_path / "20280301_2s.ori") +dr_path = str(data_path / "test_full_detector_response.h5") # path to detector response -data = BinnedData(data_path / "test_spectral_fit.yaml") -background = BinnedData(data_path / "test_spectral_fit.yaml") +crab = BinnedData(data_path / "test_spectral_fit.yaml") +bkg_dist = BinnedData(data_path / "test_spectral_fit.yaml") -data.load_binned_data_from_hdf5(binned_data=data_path / "test_spectral_fit_data.h5") -background.load_binned_data_from_hdf5(binned_data=data_path / 
"test_spectral_fit_background.h5") +crab.load_binned_data_from_hdf5(binned_data=data_path / "test_spectral_fit_data.h5") +bkg_dist.load_binned_data_from_hdf5(binned_data=data_path / "test_spectral_fit_background.h5") bkg_par = Parameter("background_cosi", # background parameter 1, # initial value of parameter @@ -52,14 +61,38 @@ model = Model(source) -def test_point_source_spectral_fit(): +def test_point_source_spectral_fit(background=None): + + dr = FullDetectorResponse.open(dr_path) + instrument_response = BinnedInstrumentResponse(dr) + + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. + # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and + # it was added to the expectation, not the normalized bkg + global bkg_dist # Was giving the error "UnboundLocalError: cannot access local variable 'bkg_dist' where it is not associated with a value" + bkg_dist = bkg_dist.binned_data.project('Em', 'Phi', 'PsiChi') + bkg_dist += sys.float_info.min + + data = EmCDSBinnedData(crab.binned_data.project('Em', 'Phi', 'PsiChi') + bkg_dist) + bkg = FreeNormBinnedBackground(bkg_dist, + sc_history=sc_orientation, + copy=False) - cosi = COSILike("cosi", # COSI 3ML plugin - dr = dr, # detector response - data = data.binned_data.project('Em', 'Phi', 'PsiChi'), # data (source+background) - bkg = background.binned_data.project('Em', 'Phi', 'PsiChi'), # background model - sc_orientation = sc_orientation, # spacecraft orientation - nuisance_param = bkg_par) # background parameter + psr = BinnedThreeMLPointSourceResponse(data=data, + instrument_response=instrument_response, + sc_history=sc_orientation, + energy_axis=dr.axes['Ei'], + polarization_axis=dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, + nside=2 * data.axes['PsiChi'].nside) + + response = BinnedThreeMLModelFolding(data=data, point_source_response=psr) + + like_fun = PoissonLikelihood(data, response, bkg) + + cosi = 
ThreeMLPluginInterface('cosi', + like_fun, + response, + bkg) plugins = DataList(cosi) @@ -76,7 +109,3 @@ def test_point_source_spectral_fit(): assert np.allclose([cosi.get_log_like()], [213.14242014103897], atol=[1.0]) - - # Test scatt map method: - coord = SkyCoord(l=184.56*u.deg,b=-5.78*u.deg,frame="galactic") - cosi._get_scatt_map(coord) diff --git a/tests/ts_map/test_fast_ts_map.py b/tests/ts_map/test_fast_ts_map.py index 8b942878..52ee7662 100644 --- a/tests/ts_map/test_fast_ts_map.py +++ b/tests/ts_map/test_fast_ts_map.py @@ -1,3 +1,11 @@ +from cosipy import test_data +from pytest import approx +from threeML import Powerlaw +from cosipy import FastTSMap, SpacecraftHistory +from histpy import Histogram +import numpy as np +from astropy.coordinates import SkyCoord +import astropy.units as u from pathlib import Path import os @@ -11,7 +19,7 @@ from histpy import Histogram from cosipy import test_data -from cosipy import FastTSMap, MOCTSMap, SpacecraftFile +from cosipy import FastTSMap, MOCTSMap, SpacecraftHistory def test_ts_fit(): @@ -20,7 +28,7 @@ def test_ts_fit(): response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.parse_from_file(orientation_path) + ori = SpacecraftHistory.open(orientation_path) src_bkg = Histogram.open(src_bkg_path).project(['Em', 'PsiChi', 'Phi']) bkg = Histogram.open(bkg_path).project(['Em', 'PsiChi', 'Phi']) @@ -106,7 +114,7 @@ def test_moc_ts_fit(): response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.parse_from_file(orientation_path) + ori = SpacecraftHistory.open(orientation_path) src_bkg = Histogram.open(src_bkg_path).project(['Em', 'PsiChi', 'Phi']) bkg = Histogram.open(bkg_path).project(['Em', 'PsiChi', 'Phi'])