diff --git a/cosipy/image_deconvolution/MAP_RichardsonLucy.py b/cosipy/image_deconvolution/MAP_RichardsonLucy.py index d3b2db6e..938d4ccd 100644 --- a/cosipy/image_deconvolution/MAP_RichardsonLucy.py +++ b/cosipy/image_deconvolution/MAP_RichardsonLucy.py @@ -9,8 +9,8 @@ from histpy import Histogram from .RichardsonLucySimple import RichardsonLucySimple - from .prior_tsv import PriorTSV +from .constants import DEFAULT_STOPPING_THRESHOLD class MAP_RichardsonLucy(RichardsonLucySimple): """ @@ -79,7 +79,7 @@ def __init__(self, initial_model, dataset, mask, parameter): # stopping criteria self.stopping_criteria_statistics = parameter.get('stopping_criteria:statistics', "log-posterior") - self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', 1e-2) + self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', DEFAULT_STOPPING_THRESHOLD) if not self.stopping_criteria_statistics in ["log-likelihood", "log-posterior"]: raise ValueError @@ -306,28 +306,25 @@ def finalization(self): """ finalization after running the image deconvolution """ - if self.save_results == True: logger.info(f'Saving results in {self.save_results_directory}') counter_name = "iteration" - - # model - histkey_filename = [("model", f"{self.save_results_directory}/model.hdf5"), - ("prior_filter", f"{self.save_results_directory}/prior_filter.hdf5")] - for key, filename in histkey_filename: + # model + histogram_keys = [("model", f"{self.save_results_directory}/model.hdf5", self.save_only_final_result), + ("prior_filter", f"{self.save_results_directory}/prior_filter.hdf5", self.save_only_final_result)] - self.save_histogram(filename = filename, - counter_name = counter_name, - histogram_key = key, - only_final_result = self.save_only_final_result) - #fits fits_filename = f'{self.save_results_directory}/results.fits' - self.save_results_as_fits(filename = fits_filename, - counter_name = counter_name, - values_key_name_format = [("log-posterior", "LOG-POSTERIOR", 
"D")], - dicts_key_name_format = [("background_normalization", "BKG_NORM", "D"), ("log-prior", "LOG-PRIOR", "D")], - lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")]) + values_key_name_format = [("log-posterior", "LOG-POSTERIOR", "D")] + dicts_key_name_format = [("background_normalization", "BKG_NORM", "D"), ("log-prior", "LOG-PRIOR", "D")] + lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")] + + self._save_standard_results(counter_name, + histogram_keys, + fits_filename, + values_key_name_format, + dicts_key_name_format, + lists_key_name_format) diff --git a/cosipy/image_deconvolution/RichardsonLucy.py b/cosipy/image_deconvolution/RichardsonLucy.py index 235b3d39..a7553b60 100644 --- a/cosipy/image_deconvolution/RichardsonLucy.py +++ b/cosipy/image_deconvolution/RichardsonLucy.py @@ -9,6 +9,8 @@ from .RichardsonLucySimple import RichardsonLucySimple +from .constants import DEFAULT_STOPPING_THRESHOLD + class RichardsonLucy(RichardsonLucySimple): """ A class for the RichardsonLucy algorithm. 
@@ -60,7 +62,7 @@ def __init__(self, initial_model, dataset, mask, parameter): # stopping criteria self.stopping_criteria_statistics = parameter.get('stopping_criteria:statistics', "log-likelihood") - self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', 1e-2) + self.stopping_criteria_threshold = parameter.get('stopping_criteria:threshold', DEFAULT_STOPPING_THRESHOLD) if not self.stopping_criteria_statistics in ["log-likelihood"]: raise ValueError @@ -198,25 +200,23 @@ def finalization(self): counter_name = "iteration" # model - histkey_filename = [("model", f"{self.save_results_directory}/model.hdf5"), - ("delta_model", f"{self.save_results_directory}/delta_model.hdf5"), - ("processed_delta_model", f"{self.save_results_directory}/processed_delta_model.hdf5")] - - for key, filename in histkey_filename: - - self.save_histogram(filename = filename, - counter_name = counter_name, - histogram_key = key, - only_final_result = self.save_only_final_result) + histogram_keys = [("model", f"{self.save_results_directory}/model.hdf5", self.save_only_final_result), + ("delta_model", f"{self.save_results_directory}/delta_model.hdf5", self.save_only_final_result), + ("processed_delta_model", f"{self.save_results_directory}/processed_delta_model.hdf5", self.save_only_final_result)] #fits fits_filename = f'{self.save_results_directory}/results.fits' - self.save_results_as_fits(filename = fits_filename, - counter_name = counter_name, - values_key_name_format = [("alpha", "ALPHA", "D")], - dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")], - lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")]) + values_key_name_format = [("alpha", "ALPHA", "D")] + dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")] + lists_key_name_format = [("log-likelihood", "LOG-LIKELIHOOD", "D")] + + self._save_standard_results(counter_name, + histogram_keys, + fits_filename, + values_key_name_format, + dicts_key_name_format, + 
lists_key_name_format) def calc_alpha(self, delta_model, model): """ diff --git a/cosipy/image_deconvolution/RichardsonLucySimple.py b/cosipy/image_deconvolution/RichardsonLucySimple.py index 22e4239c..841ab34e 100644 --- a/cosipy/image_deconvolution/RichardsonLucySimple.py +++ b/cosipy/image_deconvolution/RichardsonLucySimple.py @@ -7,6 +7,8 @@ from .deconvolution_algorithm_base import DeconvolutionAlgorithmBase +from .constants import DEFAULT_BKG_NORM_RANGE, DEFAULT_RESPONSE_WEIGHTING_INDEX + class RichardsonLucySimple(DeconvolutionAlgorithmBase): """ A class for the original RichardsonLucy algorithm. @@ -37,12 +39,12 @@ def __init__(self, initial_model, dataset, mask, parameter): # background normalization optimization self.do_bkg_norm_optimization = parameter.get('background_normalization_optimization:activate', False) if self.do_bkg_norm_optimization: - self.dict_bkg_norm_range = parameter.get('background_normalization_optimization:range', {key: [0.0, 100.0] for key in self.dict_bkg_norm.keys()}) + self.dict_bkg_norm_range = parameter.get('background_normalization_optimization:range', {key: list(DEFAULT_BKG_NORM_RANGE) for key in self.dict_bkg_norm.keys()}) # response_weighting self.do_response_weighting = parameter.get('response_weighting:activate', False) if self.do_response_weighting: - self.response_weighting_index = parameter.get('response_weighting:index', 0.5) + self.response_weighting_index = parameter.get('response_weighting:index', DEFAULT_RESPONSE_WEIGHTING_INDEX) # saving results self.save_results = parameter.get('save_results:activate', False) @@ -184,28 +186,25 @@ def finalization(self): """ finalization after running the image deconvolution """ - if self.save_results == True: logger.info(f'Saving results in {self.save_results_directory}') counter_name = "iteration" - - # model - histkey_filename = [("model", f"{self.save_results_directory}/model.hdf5"), - ("delta_model", f"{self.save_results_directory}/delta_model.hdf5")] - for key, filename in
histkey_filename: + # model + histogram_keys = [("model", f"{self.save_results_directory}/model.hdf5", self.save_only_final_result), + ("delta_model", f"{self.save_results_directory}/delta_model.hdf5", self.save_only_final_result)] - self.save_histogram(filename = filename, - counter_name = counter_name, - histogram_key = key, - only_final_result = self.save_only_final_result) - #fits fits_filename = f'{self.save_results_directory}/results.fits' - self.save_results_as_fits(filename = fits_filename, - counter_name = counter_name, - values_key_name_format = [], - dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")], - lists_key_name_format = []) + values_key_name_format = [] + dicts_key_name_format = [("background_normalization", "BKG_NORM", "D")] + lists_key_name_format = [] + + self._save_standard_results(counter_name, + histogram_keys, + fits_filename, + values_key_name_format, + dicts_key_name_format, + lists_key_name_format) diff --git a/cosipy/image_deconvolution/constants.py b/cosipy/image_deconvolution/constants.py new file mode 100644 index 00000000..34f9783e --- /dev/null +++ b/cosipy/image_deconvolution/constants.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# coding: UTF-8 +""" +Constants for the image deconvolution module. + +This module centralizes magic numbers and default values. 
+""" +import astropy.units as u + +# ============================================================================ +# Numerical Constants +# ============================================================================ + +NUMERICAL_ZERO = 1e-12 +"""Small value to avoid division by zero in expectation calculations.""" + +CHUNK_SIZE_FITS = 998 +"""Maximum columns in FITS table (FITS limit is 1000, using 998 for safety).""" + +# ============================================================================ +# Physical Constants +# ============================================================================ + +EARTH_RADIUS_KM = 6378.0 +"""Earth radius in km (WGS84 equatorial radius).""" + +# ============================================================================ +# Default Values - General Algorithm Parameters +# ============================================================================ + +DEFAULT_MINIMUM_FLUX = 0.0 +"""Default minimum flux to enforce non-negativity.""" + +DEFAULT_ITERATION_MAX = 1 +"""Default maximum number of iterations.""" + +DEFAULT_STOPPING_THRESHOLD = 1e-2 +"""Default convergence threshold for log-likelihood change.""" + +# ============================================================================ +# Default Values - Background Normalization +# ============================================================================ + +DEFAULT_BKG_NORM_RANGE = [0.0, 100.0] +"""Default allowed range [min, max] for background normalization factors.""" + +# ============================================================================ +# Default Values - Response Weighting +# ============================================================================ + +DEFAULT_RESPONSE_WEIGHTING_INDEX = 0.5 +"""Default power index for response weighting: filter = (exposure/max)^index.""" diff --git a/cosipy/image_deconvolution/coordsys_conversion_matrix.py b/cosipy/image_deconvolution/coordsys_conversion_matrix.py index ea0f74b2..ae6a84f8 100644 --- 
a/cosipy/image_deconvolution/coordsys_conversion_matrix.py +++ b/cosipy/image_deconvolution/coordsys_conversion_matrix.py @@ -10,6 +10,7 @@ from histpy import Histogram, Axes, Axis, HealpixAxis from .dataIF_COSI_DC2 import tensordot_sparse +from .constants import EARTH_RADIUS_KM class CoordsysConversionMatrix(Histogram): """ @@ -143,7 +144,7 @@ def spacecraft_attitude_binning_ccm(cls, full_detector_response, exposure_table, return coordsys_conv_matrix @classmethod - def _calc_exposure_time_map(cls, nside_model, num_pointings, earth_zenith, altitude, delta_time, is_nest_model = False, r_earth = 6378.0): + def _calc_exposure_time_map(cls, nside_model, num_pointings, earth_zenith, altitude, delta_time, is_nest_model = False, r_earth = EARTH_RADIUS_KM): """ Calculate exposure time map considering Earth occultation. @@ -167,7 +168,7 @@ def _calc_exposure_time_map(cls, nside_model, num_pointings, earth_zenith, altit Array of time intervals in seconds for each pointing. is_nest_model : bool, default False If True, use nested HEALPix pixel ordering scheme. If False, use ring ordering. - r_earth : float, default 6378.0 + r_earth : float, default EARTH_RADIUS_KM Earth's radius in kilometers. Returns diff --git a/cosipy/image_deconvolution/dataIF_COSI_DC2.py b/cosipy/image_deconvolution/dataIF_COSI_DC2.py index dd942afd..9d870680 100644 --- a/cosipy/image_deconvolution/dataIF_COSI_DC2.py +++ b/cosipy/image_deconvolution/dataIF_COSI_DC2.py @@ -12,6 +12,7 @@ from cosipy.response import FullDetectorResponse from .image_deconvolution_data_interface_base import ImageDeconvolutionDataInterfaceBase +from .constants import NUMERICAL_ZERO def tensordot_sparse(A, A_unit, B, axes): """ @@ -214,7 +215,7 @@ def _calc_exposure_map(self): logger.info("Finished...") - def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12): + def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = NUMERICAL_ZERO): """ Calculate expected counts from a given model. 
@@ -224,7 +225,7 @@ def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12): Model map dict_bkg_norm : dict, default None background normalization for each background model, e.g, {'albedo': 0.95, 'activation': 1.05} - almost_zero : float, default 1e-12 + almost_zero : float, default NUMERICAL_ZERO In order to avoid zero components in extended count histogram, a tiny offset is introduced. It should be small enough not to effect statistics. diff --git a/cosipy/image_deconvolution/dataIF_Parallel.py b/cosipy/image_deconvolution/dataIF_Parallel.py index 91b59372..a5976fb3 100644 --- a/cosipy/image_deconvolution/dataIF_Parallel.py +++ b/cosipy/image_deconvolution/dataIF_Parallel.py @@ -19,6 +19,7 @@ from cosipy.response import FullDetectorResponse from cosipy.image_deconvolution import ImageDeconvolutionDataInterfaceBase +from .constants import NUMERICAL_ZERO def load_response_matrix(comm, start_col, end_col, filename): ''' @@ -255,7 +256,7 @@ def _calc_exposure_map(self): logger.info("Finished...") - def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12): + def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = NUMERICAL_ZERO): """ Calculate expected counts from a given model. @@ -265,7 +266,7 @@ def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12): Model map dict_bkg_norm : dict, default None background normalization for each background model, e.g, {'albedo': 0.95, 'activation': 1.05} - almost_zero : float, default 1e-12 + almost_zero : float, default NUMERICAL_ZERO In order to avoid zero components in extended count histogram, a tiny offset is introduced. It should be small enough not to effect statistics. 
diff --git a/cosipy/image_deconvolution/deconvolution_algorithm_base.py b/cosipy/image_deconvolution/deconvolution_algorithm_base.py index 5c057413..496becdd 100644 --- a/cosipy/image_deconvolution/deconvolution_algorithm_base.py +++ b/cosipy/image_deconvolution/deconvolution_algorithm_base.py @@ -6,6 +6,8 @@ import logging logger = logging.getLogger(__name__) +from .constants import NUMERICAL_ZERO, DEFAULT_MINIMUM_FLUX, DEFAULT_ITERATION_MAX, CHUNK_SIZE_FITS + class DeconvolutionAlgorithmBase(ABC): """ A base class for image deconvolution algorithms. @@ -59,7 +61,7 @@ def __init__(self, initial_model, dataset, mask, parameter): logger.debug(f'dict_dataset_indexlist_for_bkg_models: {self.dict_dataset_indexlist_for_bkg_models}') # minimum flux - self.minimum_flux = parameter.get('minimum_flux:value', 0.0) + self.minimum_flux = parameter.get('minimum_flux:value', DEFAULT_MINIMUM_FLUX) minimum_flux_unit = parameter.get('minimum_flux:unit', initial_model.unit) if minimum_flux_unit is not None: @@ -67,7 +69,7 @@ def __init__(self, initial_model, dataset, mask, parameter): # parameters of the iteration self.iteration_count = 0 - self.iteration_max = parameter.get('iteration_max', 1) + self.iteration_max = parameter.get('iteration_max', DEFAULT_ITERATION_MAX) @abstractmethod def initialization(self): @@ -168,7 +170,7 @@ def iteration(self): return stop_iteration - def calc_expectation_list(self, model, dict_bkg_norm = None, almost_zero = 1e-12): + def calc_expectation_list(self, model, dict_bkg_norm = None, almost_zero = NUMERICAL_ZERO): """ Calculate a list of expected count histograms corresponding to each data in the registered dataset. 
@@ -178,7 +180,7 @@ def calc_expectation_list(self, model, dict_bkg_norm = None, almost_zero = 1e-12 Model dict_bkg_norm : dict, default None background normalization for each background model, e.g, {'albedo': 0.95, 'activation': 1.05} - almost_zero : float, default 1e-12 + almost_zero : float, default NUMERICAL_ZERO In order to avoid zero components in extended count histogram, a tiny offset is introduced. It should be small enough not to effect statistics. @@ -331,7 +333,7 @@ def save_results_as_fits(self, filename, counter_name, values_key_name_format, d dict_keys = list(self.results[0][key].keys()) - chunk_size = 998 # when the number of columns >= 1000, the fits file may not be saved. + chunk_size = CHUNK_SIZE_FITS # when the number of columns >= 1000, the fits file may not be saved. for i_chunk, chunked_dict_keys in enumerate([dict_keys[i:i+chunk_size] for i in range(0, len(dict_keys), chunk_size)]): cols_dict = [fits.Column(name=dict_key, array=[result[key][dict_key] for result in self.results], format=fits_format) for dict_key in chunked_dict_keys] @@ -358,3 +360,41 @@ def save_results_as_fits(self, filename, counter_name, values_key_name_format, d # write fits.HDUList(hdu_list).writeto(filename, overwrite=True) + + def _save_standard_results(self, counter_name, histogram_keys, fits_filename, + values_key_name_format=None, dicts_key_name_format=None, lists_key_name_format=None): + """ + Save standard results including histograms and FITS files. + + Parameters + ---------- + counter_name : str + Name of the counter (e.g., "iteration") + histogram_keys : list of tuple + List of (key, filename, only_final_result) for histograms to save. + fits_filename : str + Path to the FITS file. + values_key_name_format : list of tuple, optional + List of (key, name, fits_format) for single values to save in FITS. + dicts_key_name_format : list of tuple, optional + List of (key, name, fits_format) for dictionaries to save in FITS. 
+ lists_key_name_format : list of tuple, optional + List of (key, name, fits_format) for lists to save in FITS. + """ + # Save histograms + for key, filename, only_final_result in histogram_keys: + self.save_histogram( + filename = filename, + counter_name = counter_name, + histogram_key = key, + only_final_result = only_final_result + ) + + # Save FITS file (use default if not specified) + self.save_results_as_fits( + filename = fits_filename, + counter_name = counter_name, + values_key_name_format = values_key_name_format if values_key_name_format is not None else [], + dicts_key_name_format = dicts_key_name_format if dicts_key_name_format is not None else [], + lists_key_name_format = lists_key_name_format if lists_key_name_format is not None else [] + ) diff --git a/cosipy/image_deconvolution/image_deconvolution.py b/cosipy/image_deconvolution/image_deconvolution.py index 1d771cf8..82575ae2 100644 --- a/cosipy/image_deconvolution/image_deconvolution.py +++ b/cosipy/image_deconvolution/image_deconvolution.py @@ -145,9 +145,14 @@ def model_initialization(self): # set self._model_class model_name = self.parameter['model_definition']['class'] # Options include "AllSkyImage", etc. - if not model_name in self.model_classes.keys(): # See model_classes dictionary declared above - logger.error(f'The model class "{model_name}" does not exist!') - raise ValueError + if model_name not in self.model_classes.keys(): # See model_classes dictionary declared above + available_models = ', '.join(self.model_classes.keys()) + error_msg = ( + f'Unknown model class "{model_name}". 
' + f'Available models: {available_models}' + ) + logger.error(error_msg) + raise ValueError(error_msg) self._model_class = self.model_classes[model_name] @@ -191,9 +196,14 @@ def register_deconvolution_algorithm(self): algorithm_name = parameter_deconvolution['algorithm'] algorithm_parameter = Configurator(parameter_deconvolution['parameter']) - if not algorithm_name in self.deconvolution_algorithm_classes.keys(): - logger.error(f'The algorithm "{algorithm_name}" does not exist!') - raise ValueError + if algorithm_name not in self.deconvolution_algorithm_classes.keys(): + available_algorithms = ', '.join(self.deconvolution_algorithm_classes.keys()) + error_msg = ( + f'Unknown deconvolution algorithm "{algorithm_name}". ' + f'Available algorithms: {available_algorithms}' + ) + logger.error(error_msg) + raise ValueError(error_msg) self._deconvolution_class = self.deconvolution_algorithm_classes[algorithm_name] # Alias to class constructor self._deconvolution = self._deconvolution_class(initial_model = self.initial_model, # Initialize object for relevant class @@ -272,4 +282,4 @@ def _finalize(self): # only in the master node if self.is_master_node: - super()._finalize() \ No newline at end of file + super()._finalize() diff --git a/cosipy/image_deconvolution/image_deconvolution_data_interface_base.py b/cosipy/image_deconvolution/image_deconvolution_data_interface_base.py index 64e4f12c..58fddfea 100644 --- a/cosipy/image_deconvolution/image_deconvolution_data_interface_base.py +++ b/cosipy/image_deconvolution/image_deconvolution_data_interface_base.py @@ -1,5 +1,7 @@ from abc import ABC, abstractmethod +from .constants import NUMERICAL_ZERO + class ImageDeconvolutionDataInterfaceBase(ABC): """ A base class for managing data for image analysis, i.e., @@ -90,7 +92,7 @@ def summed_bkg_model(self, key): return self._summed_bkg_models[key] @abstractmethod - def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12): + def calc_expectation(self, model, 
dict_bkg_norm = None, almost_zero = NUMERICAL_ZERO): """ Calculate expected counts from a given model map. @@ -100,7 +102,7 @@ def calc_expectation(self, model, dict_bkg_norm = None, almost_zero = 1e-12): Model dict_bkg_norm : dict, default None background normalization for each background model, e.g, {'albedo': 0.95, 'activation': 1.05} - almost_zero : float, default 1e-12 + almost_zero : float, default NUMERICAL_ZERO In order to avoid zero components in extended count histogram, a tiny offset is introduced. It should be small enough not to effect statistics.