33 changes: 15 additions & 18 deletions cosipy/image_deconvolution/MAP_RichardsonLucy.py
@@ -9,8 +9,8 @@
from histpy import Histogram

from .RichardsonLucySimple import RichardsonLucySimple

from .prior_tsv import PriorTSV
+from .constants import DEFAULT_STOPPING_THRESHOLD

class MAP_RichardsonLucy(RichardsonLucySimple):
"""
@@ -79,7 +79,7 @@ def __init__(self, initial_model, dataset, mask, parameter):

# stopping criteria
self.stopping_criteria_statistics = parameter.get('stopping_criteria:statistics', "log-posterior")
-self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', 1e-2)
+self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', DEFAULT_STOPPING_THRESHOLD)

if not self.stopping_criteria_statistics in ["log-likelihood", "log-posterior"]:
raise ValueError
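
The loop that actually applies stopping_criteria_threshold is outside the hunks shown here. A minimal sketch of such a convergence check, with illustrative names only (not code from this PR):

# Illustrative only: stop once the per-iteration change of the chosen statistic
# (log-likelihood or log-posterior) drops below the configured threshold.
def has_converged(history, threshold):
    if len(history) < 2:
        return False
    return abs(history[-1] - history[-2]) < threshold
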
@@ -306,28 +306,25 @@ def finalization(self):
"""
finalization after running the image deconvolution
"""

if self.save_results == True:
logger.info(f'Saving results in {self.save_results_directory}')

counter_name = "iteration"

-# model
-histkey_filename = [("model", f"{self.save_results_directory}/model.hdf5"),
-                    ("prior_filter", f"{self.save_results_directory}/prior_filter.hdf5")]

-for key, filename in histkey_filename:
+# model
+histogram_keys = [("model", f"{self.save_results_directory}/model.hdf5", self.save_only_final_result),
+                  ("prior_filter", f"{self.save_results_directory}/prior_filter.hdf5", self.save_only_final_result)]

-    self.save_histogram(filename = filename,
-                        counter_name = counter_name,
-                        histogram_key = key,
-                        only_final_result = self.save_only_final_result)

#fits
fits_filename = f'{self.save_results_directory}/results.fits'

-self.save_results_as_fits(filename = fits_filename,
-                          counter_name = counter_name,
-                          values_key_name_format = [("log-posterior", "LOG-POSTERIOR", "D")],
-                          dicts_key_name_format = [("background_normalization", "BKG_NORM", "D"), ("log-prior", "LOG-PRIOR", "D")],
-                          lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")])
+values_key_name_format = [("log-posterior", "LOG-POSTERIOR", "D")]
+dicts_key_name_format = [("background_normalization", "BKG_NORM", "D"), ("log-prior", "LOG-PRIOR", "D")]
+lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")]

+self._save_standard_results(counter_name,
+                            histogram_keys,
+                            fits_filename,
+                            values_key_name_format,
+                            dicts_key_name_format,
+                            lists_key_name_format)
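
All three algorithm classes now hand the saving boilerplate to a single _save_standard_results helper. That helper is not part of this diff (it presumably lives in DeconvolutionAlgorithmBase); the following is only a sketch, reconstructed from the calls it replaces above, of what such a helper would need to do:

# Sketch only, not taken from this diff: a consolidated helper along these lines
# would reproduce the save_histogram loop and the save_results_as_fits call removed above.
def _save_standard_results(self, counter_name, histogram_keys, fits_filename,
                           values_key_name_format, dicts_key_name_format,
                           lists_key_name_format):

    # one HDF5 file per registered histogram
    for key, filename, only_final_result in histogram_keys:
        self.save_histogram(filename = filename,
                            counter_name = counter_name,
                            histogram_key = key,
                            only_final_result = only_final_result)

    # scalar values, per-component dictionaries and per-iteration lists go into one FITS file
    self.save_results_as_fits(filename = fits_filename,
                              counter_name = counter_name,
                              values_key_name_format = values_key_name_format,
                              dicts_key_name_format = dicts_key_name_format,
                              lists_key_name_format = lists_key_name_format)

Folding save_only_final_result into each histogram_keys tuple lets such a helper treat every histogram uniformly instead of carrying a separate flag.
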
32 changes: 16 additions & 16 deletions cosipy/image_deconvolution/RichardsonLucy.py
@@ -9,6 +9,8 @@

from .RichardsonLucySimple import RichardsonLucySimple

+from .constants import DEFAULT_STOPPING_THRESHOLD

class RichardsonLucy(RichardsonLucySimple):
"""
A class for the RichardsonLucy algorithm.
@@ -60,7 +62,7 @@ def __init__(self, initial_model, dataset, mask, parameter):

# stopping criteria
self.stopping_criteria_statistics = parameter.get('stopping_criteria:statistics', "log-likelihood")
-self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', 1e-2)
+self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', DEFAULT_STOPPING_THRESHOLD)

if not self.stopping_criteria_statistics in ["log-likelihood"]:
raise ValueError
@@ -198,25 +200,23 @@ def finalization(self):
counter_name = "iteration"

# model
histkey_filename = [("model", f"{self.save_results_directory}/model.hdf5"),
("delta_model", f"{self.save_results_directory}/delta_model.hdf5"),
("processed_delta_model", f"{self.save_results_directory}/processed_delta_model.hdf5")]

for key, filename in histkey_filename:

self.save_histogram(filename = filename,
counter_name = counter_name,
histogram_key = key,
only_final_result = self.save_only_final_result)
histogram_keys = [("model", f"{self.save_results_directory}/model.hdf5", self.save_only_final_result),
("delta_model", f"{self.save_results_directory}/delta_model.hdf5", self.save_only_final_result),
("processed_delta_model", f"{self.save_results_directory}/processed_delta_model.hdf5", self.save_only_final_result)]

#fits
fits_filename = f'{self.save_results_directory}/results.fits'

self.save_results_as_fits(filename = fits_filename,
counter_name = counter_name,
values_key_name_format = [("alpha", "ALPHA", "D")],
dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")],
lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")])
values_key_name_format = [("alpha", "ALPHA", "D")]
dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")]
lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")]

self._save_standard_results(counter_name,
histogram_keys,
fits_filename,
values_key_name_format,
dicts_key_name_format,
lists_key_name_format)

def calc_alpha(self, delta_model, model):
"""
35 changes: 17 additions & 18 deletions cosipy/image_deconvolution/RichardsonLucySimple.py
@@ -7,6 +7,8 @@

from .deconvolution_algorithm_base import DeconvolutionAlgorithmBase

+from .constants import DEFAULT_BKG_NORM_RANGE, DEFAULT_RESPONSE_WEIGHTING_INDEX

class RichardsonLucySimple(DeconvolutionAlgorithmBase):
"""
A class for the original RichardsonLucy algorithm.
@@ -37,12 +39,12 @@ def __init__(self, initial_model, dataset, mask, parameter):
# background normalization optimization
self.do_bkg_norm_optimization = parameter.get('background_normalization_optimization:activate', False)
if self.do_bkg_norm_optimization:
-self.dict_bkg_norm_range = parameter.get('background_normalization_optimization:range', {key: [0.0, 100.0] for key in self.dict_bkg_norm.keys()})
+self.dict_bkg_norm_range = parameter.get('background_normalization_optimization:range', {key: DEFAULT_BKG_NORM_RANGE for key in self.dict_bkg_norm.keys()})

# response_weighting
self.do_response_weighting = parameter.get('response_weighting:activate', False)
if self.do_response_weighting:
-self.response_weighting_index = parameter.get('response_weighting:index', 0.5)
+self.response_weighting_index = parameter.get('response_weighting:index', DEFAULT_RESPONSE_WEIGHTING_INDEX)

# saving results
self.save_results = parameter.get('save_results:activate', False)
@@ -184,28 +186,25 @@ def finalization(self):
"""
finalization after running the image deconvolution
"""

if self.save_results == True:
logger.info(f'Saving results in {self.save_results_directory}')

counter_name = "iteration"

-# model
-histkey_filename = [("model", f"{self.save_results_directory}/model.hdf5"),
-                    ("delta_model", f"{self.save_results_directory}/delta_model.hdf5")]

-for key, filename in histkey_filename:
+# model
+histogram_keys = [("model", f"{self.save_results_directory}/model.hdf5", self.save_only_final_result),
+                  ("delta_model", f"{self.save_results_directory}/delta_model.hdf5", self.save_only_final_result)]

-    self.save_histogram(filename = filename,
-                        counter_name = counter_name,
-                        histogram_key = key,
-                        only_final_result = self.save_only_final_result)

#fits
fits_filename = f'{self.save_results_directory}/results.fits'

-self.save_results_as_fits(filename = fits_filename,
-                          counter_name = counter_name,
-                          values_key_name_format = [],
-                          dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")],
-                          lists_key_name_format = [])
+values_key_name_format = []
+dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")]
+lists_key_name_format = []

+self._save_standard_results(counter_name,
+                            histogram_keys,
+                            fits_filename,
+                            values_key_name_format,
+                            dicts_key_name_format,
+                            lists_key_name_format)
52 changes: 52 additions & 0 deletions cosipy/image_deconvolution/constants.py
@@ -0,0 +1,52 @@
#!/usr/bin/env python
# coding: UTF-8
"""
Constants for the image deconvolution module.

This module centralizes magic numbers and default values.
"""
import astropy.units as u

# ============================================================================
# Numerical Constants
# ============================================================================

NUMERICAL_ZERO = 1e-12
"""Small value to avoid division by zero in expectation calculations."""

CHUNK_SIZE_FITS = 998
"""Maximum columns in FITS table (FITS limit is 1000, using 998 for safety)."""

# ============================================================================
# Physical Constants
# ============================================================================

EARTH_RADIUS_KM = 6378.0
"""Earth radius in km (WGS84 equatorial radius)."""

# ============================================================================
# Default Values - General Algorithm Parameters
# ============================================================================

DEFAULT_MINIMUM_FLUX = 0.0
"""Default minimum flux to enforce non-negativity."""

DEFAULT_ITERATION_MAX = 1
"""Default maximum number of iterations."""

DEFAULT_STOPPING_THRESHOLD = 1e-2
"""Default convergence threshold for log-likelihood change."""

# ============================================================================
# Default Values - Background Normalization
# ============================================================================

DEFAULT_BKG_NORM_RANGE = [0.0, 100.0]
"""Default allowed range [min, max] for background normalization factors."""

# ============================================================================
# Default Values - Response Weighting
# ============================================================================

DEFAULT_RESPONSE_WEIGHTING_INDEX = 0.5
"""Default power index for response weighting: filter = (exposure/max)^index."""
5 changes: 3 additions & 2 deletions cosipy/image_deconvolution/coordsys_conversion_matrix.py
@@ -10,6 +10,7 @@
from histpy import Histogram, Axes, Axis, HealpixAxis

from .dataIF_COSI_DC2 import tensordot_sparse
+from .constants import EARTH_RADIUS_KM

class CoordsysConversionMatrix(Histogram):
"""
@@ -143,7 +144,7 @@ def spacecraft_attitude_binning_ccm(cls, full_detector_response, exposure_table,
return coordsys_conv_matrix

@classmethod
-def _calc_exposure_time_map(cls, nside_model, num_pointings, earth_zenith, altitude, delta_time, is_nest_model = False, r_earth = 6378.0):
+def _calc_exposure_time_map(cls, nside_model, num_pointings, earth_zenith, altitude, delta_time, is_nest_model = False, r_earth = EARTH_RADIUS_KM):
"""
Calculate exposure time map considering Earth occultation.

@@ -167,7 +168,7 @@ def _calc_exposure_time_map(cls, nside_model, num_pointings, earth_zenith, altit
Array of time intervals in seconds for each pointing.
is_nest_model : bool, default False
If True, use nested HEALPix pixel ordering scheme. If False, use ring ordering.
-r_earth : float, default 6378.0
+r_earth : float, default EARTH_RADIUS_KM
Earth's radius in kilometers.

Returns
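The body of _calc_exposure_time_map is not shown in this diff. For reference, the standard way altitude and r_earth enter an Earth-occultation cut is through the angular radius of the Earth's disk; the sketch below is illustrative geometry, not code from this PR:

import numpy as np
from cosipy.image_deconvolution.constants import EARTH_RADIUS_KM

# Half-angle of the Earth's disk as seen from a spacecraft at altitude km above the surface.
def earth_angular_radius(altitude, r_earth = EARTH_RADIUS_KM):
    return np.arcsin(r_earth / (r_earth + altitude))          # radians

# A sky direction is blocked by the Earth when it lies within that angle of the nadir,
# i.e. when its separation from the Earth zenith exceeds pi minus that angle.
def is_occulted(separation_from_zenith, altitude, r_earth = EARTH_RADIUS_KM):
    return separation_from_zenith > np.pi - earth_angular_radius(altitude, r_earth)
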
5 changes: 3 additions & 2 deletions cosipy/image_deconvolution/dataIF_COSI_DC2.py
@@ -12,6 +12,7 @@
from cosipy.response import FullDetectorResponse

from .image_deconvolution_data_interface_base import ImageDeconvolutionDataInterfaceBase
+from .constants import NUMERICAL_ZERO

def tensordot_sparse(A, A_unit, B, axes):
"""
@@ -214,7 +215,7 @@ def _calc_exposure_map(self):

logger.info("Finished...")

-def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12):
+def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = NUMERICAL_ZERO):
"""
Calculate expected counts from a given model.

@@ -224,7 +225,7 @@ def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12):
Model map
dict_bkg_norm : dict, default None
background normalization for each background model, e.g., {'albedo': 0.95, 'activation': 1.05}
-almost_zero : float, default 1e-12
+almost_zero : float, default NUMERICAL_ZERO
In order to avoid zero components in the extended count histogram, a tiny offset is introduced.
It should be small enough not to affect statistics.

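For context on almost_zero / NUMERICAL_ZERO: the Richardson-Lucy update divides the observed counts by the expected counts, so the expectation has to stay strictly positive. A minimal illustration with made-up numbers (names are illustrative, not from this diff):

import numpy as np

observed_counts = np.array([3.0, 0.0, 7.0])
expectation = np.array([2.5, 0.0, 6.8])        # model folded with the response, plus background

expectation = expectation + 1e-12              # almost_zero = NUMERICAL_ZERO keeps every bin positive
ratio = observed_counts / expectation          # safe: no division by zero in the RL update
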
5 changes: 3 additions & 2 deletions cosipy/image_deconvolution/dataIF_Parallel.py
@@ -19,6 +19,8 @@

from cosipy.response import FullDetectorResponse
from cosipy.image_deconvolution import ImageDeconvolutionDataInterfaceBase
+from .constants import NUMERICAL_ZERO

def load_response_matrix(comm, start_col, end_col, filename):
'''
@@ -255,7 +256,7 @@ def _calc_exposure_map(self):

logger.info("Finished...")

-def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12):
+def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = NUMERICAL_ZERO):
"""
Calculate expected counts from a given model.

@@ -265,7 +266,7 @@ def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12):
Model map
dict_bkg_norm : dict, default None
background normalization for each background model, e.g., {'albedo': 0.95, 'activation': 1.05}
-almost_zero : float, default 1e-12
+almost_zero : float, default NUMERICAL_ZERO
In order to avoid zero components in the extended count histogram, a tiny offset is introduced.
It should be small enough not to affect statistics.
