diff --git a/.gitignore b/.gitignore index a26c3e3..111942a 100644 --- a/.gitignore +++ b/.gitignore @@ -11,8 +11,9 @@ docs/_build # pixi environments .pixi/* !.pixi/config.toml - - -# data files -outputs/ -preproc_uproot/ \ No newline at end of file +# others +preproc_uproot/ +skimmed/ +test* +output* +_* diff --git a/README.md b/README.md index a4393d9..9187ae1 100644 --- a/README.md +++ b/README.md @@ -72,22 +72,30 @@ pip install --upgrade pip pip install -r requirements.txt ``` -#### Data Pre-processing +#### Data Skimming -The analysis expects pre-processed data files. If you do not have them, you can generate them by running the pre-processing step. This will download the necessary data from the CERN Open Data Portal and skim it according to the configuration. +The analysis expects skimmed data files. If you do not have them, you can generate them by running the skimming step. This will download the necessary data from the CERN Open Data Portal and skim it according to the configuration. ```bash -# This command overrides the default config to run only the pre-processing step. -# It may take a while to download and process the data. -python run.py general.run_preprocessing=True general.run_mva_training=False general.analysis=nondiff general.run_histogramming=False general.run_statistics=False +# This command runs only the skimming step to produce skimmed files +python analysis.py general.run_skimming=True general.analysis=skip + +# Or run skimming and then analysis in one command +python analysis.py general.run_skimming=True ``` +The skimming system provides three modes: + +1. **Skim-only mode**: `general.analysis=skip` - Only performs skimming, no analysis +2. **Skim-and-analyse mode**: `general.run_skimming=True` - Skims data then runs analysis +3. **Analysis-only mode**: `general.run_skimming=False` - Uses existing skimmed files for analysis + ### 2. 
Run the Differentiable Analysis -Once the pre-processed data is available, you can run the main analysis with a single command: +Once the skimmed data is available, you can run the main analysis with a single command: ```bash -python run.py +python analysis.py ``` ### 3. What is Happening? @@ -121,9 +129,18 @@ The default configuration (`user/configuration.py`) is set up to perform a diffe - [1. The Configuration File (`user/configuration.py`)](#1-the-configuration-file-userconfigurationpy) - [2. Defining Analysis Logic](#2-defining-analysis-logic) - [3. Running the Analysis](#3-running-the-analysis) +- [Config-Driven Skimming Framework](#config-driven-skimming-framework) + - [Dataset Configuration](#dataset-configuration) + - [Skimming Configuration](#skimming-configuration) + - [Selection Functions](#selection-functions) + - [Integration with Main Configuration](#integration-with-main-configuration) + - [Usage Examples](#usage-examples) + - [Advanced Features](#advanced-features) - [Configuration Reference](#configuration-reference) - [`general` Block](#general-block) - [`preprocess` Block](#preprocess-block) + - [`datasets` Block](#datasets-block) + - [`skimming` Block](#skimming-block) - [`jax` Block](#jax-block) - [`mva` Block](#mva-block) - [`channels` Block](#channels-block) @@ -303,6 +320,91 @@ The allowed top-level keys for CLI overrides are: Attempting to override other keys (e.g., `jax.params`) will result in an error. To change these, you must edit the `user/configuration.py` file directly. +## Skimming Integration + +The framework provides an integrated skimming system that handles data preprocessing before analysis. + +### Usage Modes + +The skimming system operates in three modes: + +1. **Skim-only**: `general.analysis=skip` - Only performs skimming, no analysis +2. **Skim-and-analyse**: `general.run_skimming=True` - Skims data then runs analysis +3. 
**Analysis-only**: `general.run_skimming=False` - Uses existing skimmed files + +### Dataset Configuration + +The dataset manager expects text files containing lists of ROOT file paths. Configure datasets in `user/skim.py` by pointing to these text files: + +```python +# user/skim.py - See existing implementation for details +dataset_manager_config = { + "datasets": [ + { + "name": "signal", + "directory": "datasets/signal/", # Directory containing .txt files with ROOT file lists + "cross_section": 1.0, + }, + # ... other datasets + ] +} +``` + +Each dataset directory should contain `.txt` files where each line is a path to a ROOT file. + +### Skimming Configuration + +Define your skimming selection in `user/cuts.py` (see `default_skim_selection` for reference) and configure it in `user/skim.py`: + +```python +# user/skim.py - See existing implementation for details +skimming_config = { + "nanoaod_selection": { + "function": default_skim_selection, + "use": [("Muon", None), ("Jet", None), ("PuppiMET", None), ("HLT", None)] + }, + "uproot_cut_string": "HLT_TkMu50*(PuppiMET_pt>50)", + # ... other settings +} +``` + +### Integration + +Connect the configurations in `user/configuration.py`: + +```python +# user/configuration.py - See existing implementation for details +from user.skim import dataset_manager_config, skimming_config + +config = { + "general": { + "run_skimming": False, # Set to True to enable + }, + "preprocess": { + "skimming": skimming_config + }, + "datasets": dataset_manager_config, + # ... rest of configuration +} +``` + +### Running + +```bash +# Skim and analyze +python analysis.py general.run_skimming=True + +# Skim only +python analysis.py general.run_skimming=True general.analysis=skip + +# Analyze with existing skimmed files +python analysis.py +``` + +The framework automatically manages file paths, creates output directories (`{output_dir}/skimmed/`), and handles the transition from skimming to analysis. 
+ +--- + ## Configuration Reference The analysis is controlled by a central configuration dictionary, typically defined in `user/configuration.py`. @@ -348,6 +450,36 @@ Settings for the initial data skimming and filtering step. | `branches` | `dict` | *Required* | Mapping of collection names to branch lists. | | `ignore_missing` | `bool` | `False` | Ignore missing branches if `True`. | | `mc_branches` | `dict` | *Required* | Additional branches for MC samples. | +| `skimming` | `dict` | `None` | Skimming configuration (see `skimming` block below). | + +--- + +### `datasets` Block + +List of dataset configurations defining data sample properties. + +| Parameter | Type | Default | Description | +|------------------|------------|-------------|-----------------------------------------------------| +| `name` | `str` | *Required* | Unique dataset identifier. | +| `directory` | `str` | *Required* | Path to dataset files. | +| `cross_section` | `float` | *Required* | Cross-section in picobarns (pb). | +| `tree_name` | `str` | `"Events"` | ROOT tree name. | +| `weight_branch` | `str` | `"genWeight"` | Event weight branch name. | +| `metadata` | `dict` | `{}` | Additional dataset metadata. | + +--- + +### `skimming` Block + +Configuration for the data skimming step (part of `preprocess` block). + +| Parameter | Type | Default | Description | +|----------------------|------------|-------------------|------------------------------------------------| +| `selection_function` | `Callable` | *Required* | Selection function that returns a PackedSelection object. | +| `selection_use` | `list[tuple]` | *Required* | List of (object, variable) tuples specifying inputs for the selection function. | +| `output_dir` | `str` | *Required* | Base directory for skimmed files. Files follow structure: {output_dir}/{dataset}/file__{idx}/part_X.root | +| `chunk_size` | `int` | `100000` | Number of events to process per chunk (used for configuration compatibility). 
| +| `tree_name` | `str` | `"Events"` | ROOT tree name for input and output files. | --- diff --git a/analysis.py b/analysis.py index 8dd4753..4539c15 100644 --- a/analysis.py +++ b/analysis.py @@ -7,39 +7,30 @@ """ import logging import sys +import warnings + +from coffea.nanoevents import NanoAODSchema, NanoEventsFactory from analysis.diff import DifferentiableAnalysis from analysis.nondiff import NonDiffAnalysis from user.configuration import config as ZprimeConfig -from utils.input_files import construct_fileset -from utils.logging import ColoredFormatter +from utils.datasets import ConfigurableDatasetManager +from utils.logging import setup_logging, log_banner from utils.schema import Config, load_config_with_restricted_cli +from utils.metadata_extractor import NanoAODMetadataGenerator +from utils.skimming import process_workitems_with_skimming # ----------------------------- # Logging Configuration # ----------------------------- -root_logger = logging.getLogger() -root_logger.setLevel(logging.INFO) -handler = logging.StreamHandler(sys.stdout) -handler.setFormatter(ColoredFormatter()) -if root_logger.hasHandlers(): - root_logger.handlers.clear() -root_logger.addHandler(handler) +setup_logging() logger = logging.getLogger("AnalysisDriver") logging.getLogger("jax._src.xla_bridge").setLevel(logging.ERROR) -# ANSI color codes -MAGENTA = "\033[95m" -RESET = "\033[0m" +NanoAODSchema.warn_missing_crossrefs = False +warnings.filterwarnings("ignore", category=FutureWarning, module="coffea.*") -def _banner(text: str) -> str: - """Creates a magenta-colored banner for logging.""" - return ( - f"\n{MAGENTA}\n{'=' * 80}\n" - f"{' ' * ((80 - len(text)) // 2)}{text.upper()}\n" - f"{'=' * 80}{RESET}" - ) # ----------------------------- # Main Driver # ----------------------------- @@ -52,31 +43,52 @@ def main(): full_config = load_config_with_restricted_cli(ZprimeConfig, cli_args) config = Config(**full_config) # Pydantic validation logger.info(f"Luminosity: 
{config.general.lumi}") + dataset_manager = ( ConfigurableDatasetManager(config.datasets) + if config.datasets + else None + ) - fileset = construct_fileset( - max_files_per_sample=config.general.max_files - ) + logger.info(log_banner("metadata and workitems extraction")) + # Generate metadata and fileset from NanoAODs + generator = NanoAODMetadataGenerator(dataset_manager=dataset_manager) + generator.run(generate_metadata=config.general.run_metadata_generation) + fileset = generator.fileset + workitems = generator.workitems + if not workitems: + logger.error("No workitems available. Please ensure metadata generation completed successfully.") + sys.exit(1) - analysis_mode = config.general.analysis - if analysis_mode == "nondiff": - logger.info(_banner("Running Non-Differentiable Analysis")) - nondiff_analysis = NonDiffAnalysis(config) - nondiff_analysis.run_analysis_chain(fileset) + logger.info(log_banner("SKIMMING AND PROCESSING")) + logger.info(f"Processing {len(workitems)} workitems") + + # Process workitems with dask-awkward + processed_datasets = process_workitems_with_skimming(workitems, config, fileset, generator.nanoaods_summary) + + analysis_mode = config.general.analysis + if analysis_mode == "skip": + logger.info(log_banner("Skim-Only Mode: Skimming Complete")) + logger.info("✅ Skimming completed successfully. 
Analysis skipped as requested.") + logger.info(f"Skimmed files are available in the configured output directories.") + return + elif analysis_mode == "nondiff": + logger.info(log_banner("Running Non-Differentiable Analysis")) + nondiff_analysis = NonDiffAnalysis(config, processed_datasets) + nondiff_analysis.run_analysis_chain() elif analysis_mode == "diff": - logger.info(_banner("Running Differentiable Analysis")) - diff_analysis = DifferentiableAnalysis(config) - diff_analysis.run_analysis_optimisation(fileset) - else: - logger.info(_banner("Running both Non-Differentiable and Differentiable Analysis")) + logger.info(log_banner("Running Differentiable Analysis")) + diff_analysis = DifferentiableAnalysis(config, processed_datasets) + diff_analysis.run_analysis_optimisation() + else: # "both" + logger.info(log_banner("Running both Non-Differentiable and Differentiable Analysis")) # Non-differentiable analysis logger.info("Running Non-Differentiable Analysis") - nondiff_analysis = NonDiffAnalysis(config) - nondiff_analysis.run_analysis_chain(fileset) + nondiff_analysis = NonDiffAnalysis(config, processed_datasets) + nondiff_analysis.run_analysis_chain() # Differentiable analysis logger.info("Running Differentiable Analysis") - diff_analysis = DifferentiableAnalysis(config) - diff_analysis.run_analysis_optimisation(fileset) + diff_analysis = DifferentiableAnalysis(config, processed_datasets) + diff_analysis.run_analysis_optimisation() if __name__ == "__main__": diff --git a/analysis/base.py b/analysis/base.py index 2acdff6..6c4141b 100644 --- a/analysis/base.py +++ b/analysis/base.py @@ -60,7 +60,7 @@ def is_jagged(array_like: ak.Array) -> bool: class Analysis: """Base class for physics analysis implementations.""" - def __init__(self, config: Dict[str, Any]) -> None: + def __init__(self, config: Dict[str, Any], processed_datasets: Optional[Dict[str, List[Tuple[Any, Dict[str, Any]]]]] = None) -> None: """ Initialize analysis with configuration for systematics, 
corrections, and channels. @@ -73,11 +73,14 @@ def __init__(self, config: Dict[str, Any]) -> None: - 'corrections': Correction configurations - 'channels': Analysis channel definitions - 'general': General settings including output directory + processed_datasets : Optional[Dict[str, List[Tuple[Any, Dict[str, Any]]]]], optional + Pre-processed datasets from skimming, by default None """ self.config = config self.channels = config.channels self.systematics = config.systematics self.corrections = config.corrections + self.processed_datasets = processed_datasets self.corrlib_evaluators = self._load_correctionlib() self.dirs = self._prepare_dirs() diff --git a/analysis/diff.py b/analysis/diff.py index 89de41d..c4e2f55 100644 --- a/analysis/diff.py +++ b/analysis/diff.py @@ -26,7 +26,7 @@ import uproot import vector from coffea.analysis_tools import PackedSelection -from coffea.nanoevents import NanoAODSchema, NanoEventsFactory +from coffea.nanoevents import NanoAODSchema from jaxopt import OptaxSolver from tabulate import tabulate @@ -35,9 +35,11 @@ # ============================================================================= from analysis.base import Analysis from user.cuts import lumi_mask -# from utils.jax_stats import build_channel_data_scalar, compute_discovery_pvalue -from utils.evm_stats import build_channel_data_scalar, compute_discovery_pvalue, fit_params -from utils.logging import BLUE, GREEN, RED, RESET, _banner +#from utils.jax_stats import build_channel_data_scalar, compute_discovery_pvalue +from utils.evm_stats import fit_params, build_channel_data_scalar, compute_discovery_pvalue +from utils.logging import BLUE, GREEN, RED, RESET, log_banner, get_console +from rich.table import Table +from rich.text import Text from utils.mva import JAXNetwork, TFNetwork from utils.plot import ( create_cms_histogram, @@ -46,7 +48,6 @@ plot_parameters_over_iterations, plot_pvalue_vs_parameters, ) -from utils.preproc import pre_process_dak, pre_process_uproot from 
utils.tools import nested_defaultdict_to_dict, recursive_to_backend @@ -62,6 +63,7 @@ NanoAODSchema.warn_missing_crossrefs = False warnings.filterwarnings("ignore", category=FutureWarning, module="coffea.*") +jax.config.update("jax_enable_x64", True) # ----------------------------------------------------------------------------- # Utility functions @@ -102,17 +104,17 @@ def merge_histograms( def infer_processes_and_systematics( - fileset: dict[str, dict[str, Any]], + processed_datasets: dict[str, list[tuple[Any, dict[str, Any]]]], systematics_config: list[dict[str, Any]], corrections_config: list[dict[str, Any]], ) -> tuple[list[str], list[str]]: """ - Extract all unique process and systematic names from the config and fileset. + Extract all unique process and systematic names from the config and processed datasets. Parameters ---------- - fileset : dict - Dataset structure with 'metadata' dictionaries including process names. + processed_datasets : dict + Dictionary mapping dataset names to lists of (events, metadata) tuples. systematics_config : list Configuration entries for systematic variations. corrections_config : list @@ -123,11 +125,12 @@ def infer_processes_and_systematics( tuple[list[str], list[str]] Sorted list of process names and systematic variation base names. 
""" - # Pull out all process names from the fileset metadata + # Pull out all process names from the processed datasets metadata process_names = { metadata.get("process") - for dataset in fileset.values() - if (metadata := dataset.get("metadata")) and metadata.get("process") + for events_list in processed_datasets.values() + for events, metadata in events_list + if metadata.get("process") } # Extract systematic names from both systematics and corrections configs @@ -235,23 +238,27 @@ def _log_parameter_update( p_value_change_initial = 0.0 p_value_row.append(f"{p_value_change_initial:+.2f}%") - # Colour green for improvement (decrease), red for worsening (increase) + # Get console for Rich output + console = get_console() + + # Create Rich Table for p-values + p_value_table = Table(show_header=True, header_style="bold") + for header in p_value_headers: + p_value_table.add_column(header) + + # Determine color for p-value row if new_p_value < old_p_value: - p_value_row_coloured = [ - f"{GREEN}{item}{RESET}" for item in p_value_row - ] + # Green for improvement (decrease) + colored_row = [Text(item, style="green") for item in p_value_row] elif new_p_value > old_p_value: - p_value_row_coloured = [f"{RED}{item}{RESET}" for item in p_value_row] + # Red for worsening (increase) + colored_row = [Text(item, style="red") for item in p_value_row] else: - p_value_row_coloured = p_value_row - p_value_table = tabulate( - [p_value_row_coloured], headers=p_value_headers, tablefmt="grid" - ) + colored_row = [Text(item, style="white") for item in p_value_row] - # --- Parameter Table --- - table_data = [] - headers = ["Parameter", "Old Value", "New Value", "% Change"] + p_value_table.add_row(*colored_row) + # --- Parameter Table --- # Create a map from MVA name to its config for easy lookup mva_config_map = {mva.name: mva for mva in mva_configs or []} @@ -264,6 +271,15 @@ def _log_parameter_update( else {} ) + # Create Rich Table for parameters + param_headers = ["Parameter", "Old 
Value", "New Value", "% Change"] + if initial_params: + param_headers.append("% Change from Initial") + + param_table = Table(show_header=True, header_style="bold") + for header in param_headers: + param_table.add_column(header) + for name, old_val in sorted(all_old_params.items()): new_val = all_new_params[name] initial_val = all_initial_params.get(name, 0.0) @@ -307,8 +323,8 @@ def _log_parameter_update( else: percent_change = float("inf") if new_param != 0 else 0.0 - # Format for table - row = [ + # Create row data + row_data = [ name_display, f"{old_param:.4f}", f"{new_param:.4f}", @@ -316,8 +332,7 @@ def _log_parameter_update( ] # Calculate percentage change from initial - if initial_param is None: - headers.append("% Change from Initial") + if initial_params: if initial_param != 0: percent_change_from_initial = ( (new_param - initial_param) / initial_param @@ -326,15 +341,17 @@ def _log_parameter_update( percent_change_from_initial = ( float("inf") if new_param != 0 else 0.0 ) + row_data.append(f"{percent_change_from_initial:+.2f}%") - row.append(f"{percent_change_from_initial:+.2f}%") - - # Colour the row blue if the parameter value has changed + # Color the row green if the parameter value has changed, otherwise white if not np.allclose(old_param, new_param, atol=1e-6, rtol=1e-5): - row = [f"{BLUE}{item}{RESET}" for item in row] + colored_row = [Text(item, style="green") for item in row_data] + else: + colored_row = [Text(item, style="white") for item in row_data] - table_data.append(row) + param_table.add_row(*colored_row) + # Format header if isinstance(step, int): header = f"STEP {step:3d}" else: @@ -343,12 +360,18 @@ def _log_parameter_update( else: header = "" - if not table_data: - logger.info(f"\n{header}\n{p_value_table}\n(No parameters to log)") - return + # Print using Rich console directly + if header: + console.print(f"\n{header}") - table_str = tabulate(table_data, headers=headers, tablefmt="grid") - 
logger.info(f"\n{header}\n{p_value_table}\n{table_str}\n") + console.print(p_value_table) + + if param_table.row_count > 0: + console.print(param_table) + else: + console.print("(No parameters to log)") + + console.print() # Add newline # ----------------------------------------------------------------------------- @@ -696,16 +719,18 @@ class DifferentiableAnalysis(Analysis): - Training MVA models using JAX or TensorFlow frameworks. """ - def __init__(self, config: dict[str, Any]) -> None: + def __init__(self, config: dict[str, Any], processed_datasets: Optional[Dict[str, List[Tuple[Any, Dict[str, Any]]]]] = None) -> None: """ - Initialise the DifferentiableAnalysis with configuration. + Initialise the DifferentiableAnalysis with configuration and processed datasets. Parameters ---------- config : dict Analysis configuration dictionary. + processed_datasets : Optional[Dict[str, List[Tuple[Any, Dict[str, Any]]]]], optional + Pre-processed datasets from skimming, by default None """ - super().__init__(config) + super().__init__(config, processed_datasets) # Histogram storage: # histograms[variation][region][observable] = jnp.ndarray @@ -748,10 +773,6 @@ def _prepare_dirs(self) -> None: ) cache.mkdir(parents=True, exist_ok=True) - # Optional: directory to store preprocessed inputs for later reuse - preproc = self.config.general.get("preprocessed_dir") - if preproc: - Path(preproc).mkdir(parents=True, exist_ok=True) # Directory for trained MVA models mva = self.dirs["output"] / "mva_models" @@ -773,7 +794,6 @@ def _prepare_dirs(self) -> None: self.dirs.update( { "cache": cache, - "preproc": Path(preproc) if preproc else None, "mva_models": mva, "optimisation_plots": optimisation_plots, "fit_plots": fit_plots, @@ -781,23 +801,15 @@ def _prepare_dirs(self) -> None: } ) - def _log_config_summary(self, fileset: dict[str, Any]) -> None: + def _log_config_summary(self) -> None: """Logs a structured summary of the key analysis configuration options.""" - 
logger.info(_banner("Differentiable Analysis Configuration Summary")) + logger.info(log_banner("Differentiable Analysis Configuration Summary")) # --- General Settings --- general_cfg = self.config.general general_data = [ ["Output Directory", general_cfg.output_dir], - [ - "Max Files per Sample", - ( - "All" - if general_cfg.max_files == -1 - else general_cfg.max_files - ), - ], - ["Run Preprocessing", general_cfg.run_preprocessing], + ["Run Skimming", general_cfg.run_skimming], ["Run MVA Pre-training", general_cfg.run_mva_training], ["Run Systematics", general_cfg.run_systematics], ["Run Plots Only", general_cfg.run_plots_only], @@ -805,7 +817,8 @@ def _log_config_summary(self, fileset: dict[str, Any]) -> None: ] logger.info( "General Settings:\n" - + tabulate(general_data, tablefmt="grid", stralign="left") + + tabulate(general_data, tablefmt="rounded_outline", stralign="left") + + "\n" ) # --- Channels --- @@ -819,28 +832,25 @@ def _log_config_summary(self, fileset: dict[str, Any]) -> None: + tabulate( channel_data, headers=["Name", "Fit Observable"], - tablefmt="grid", + tablefmt="rounded_outline", ) + + "\n" ) # --- Processes --- - processes = sorted( - list( - { - content["metadata"]["process"] - for content in fileset.values() - } - ) - ) - if self.config.general.processes: - processes = [ - p for p in processes if p in self.config.general.processes - ] - processes_data = [[p] for p in processes] - logger.info( - "Processes Included:\n" - + tabulate(processes_data, headers=["Process"], tablefmt="grid") - ) + if self.processed_datasets: + processes = sorted(list({ + metadata["process"] + for events_list in self.processed_datasets.values() + for events, metadata in events_list + })) + if self.config.general.processes: + processes = [p for p in processes if p in self.config.general.processes] + processes_data = [[p] for p in processes] + logger.info("Processes Included:\n" + + tabulate(processes_data, headers=["Process"], tablefmt="rounded_outline") + + "\n" + 
) # --- Systematics --- if self.config.general.run_systematics: @@ -855,8 +865,8 @@ def _log_config_summary(self, fileset: dict[str, Any]) -> None: + tabulate( syst_data, headers=["Systematic", "Type"], - tablefmt="grid", - ) + tablefmt="rounded_outline") + + "\n" ) if not self.config.jax: @@ -872,7 +882,8 @@ def _log_config_summary(self, fileset: dict[str, Any]) -> None: ] logger.info( "Optimisation Settings:\n" - + tabulate(jax_data, tablefmt="grid", stralign="left") + + tabulate(jax_data, tablefmt="rounded_outline", stralign="left") + + "\n" ) # --- Optimisable Parameters --- @@ -888,7 +899,8 @@ def _log_config_summary(self, fileset: dict[str, Any]) -> None: if params_data: logger.info( "Initial Optimisable Parameters:\n" - + tabulate(params_data, headers=headers, tablefmt="grid") + + tabulate(params_data, headers=headers, tablefmt="rounded_outline") + + "\n" ) # --- Learning Rates --- @@ -901,8 +913,9 @@ def _log_config_summary(self, fileset: dict[str, Any]) -> None: + tabulate( lr_data, headers=["Parameter", "Learning Rate"], - tablefmt="grid", + tablefmt="rounded_outline", ) + + "\n" ) # --- MVA Models --- @@ -935,7 +948,8 @@ def _log_config_summary(self, fileset: dict[str, Any]) -> None: ) logger.info( "MVA Models:\n" - + tabulate(mva_data, headers=headers, tablefmt="grid") + + tabulate(mva_data, headers=headers, tablefmt="rounded_outline") + + "\n" ) # ------------------------------------------------------------------------- @@ -1050,9 +1064,8 @@ def apply_selection( for channel in self.channels: channel_name = channel.name - if not channel.use_in_diff: - logger.warning( + logger.debug( f"Skipping channel {channel_name} in diff analysis" ) continue @@ -1061,7 +1074,7 @@ def apply_selection( if ( req := self.config.general.channels ) and channel_name not in req: - logger.warning( + logger.debug( f"Skipping channel {channel_name} (not in requested channels)" ) continue @@ -1077,7 +1090,7 @@ def apply_selection( # Count number of events passing selection 
n_events = ak.sum(mask) if n_events == 0: - logger.warning( + logger.debug( f"No events left in {channel_name} for {process} after selection" ) continue @@ -1407,20 +1420,19 @@ def _histogramming( return histograms for channel in self.channels: + channel_name = channel.name # Skip channels not participating in differentiable analysis if not channel.use_in_diff: - warning_logger( - f"Skipping channel {channel.name} (use_in_diff=False)" + logger.debug( + f"Skipping channel {channel_name}" ) continue - channel_name = channel.name - # Skip if channel is not listed in requested channels if ( req := self.config.general.channels ) and channel_name not in req: - warning_logger( + logger.debug( f"Skipping channel {channel_name} (not in requested channels)" ) continue @@ -1477,6 +1489,7 @@ def _histogramming( f"Applied {event_syst.name} {direction} correction" ) + weights = jnp.asarray(ak.to_jax(weights)) # Loop over observables and compute KDE-based histograms @@ -1493,7 +1506,7 @@ def _histogramming( info_logger( f"Histogramming: {process} | {variation} | {channel_name} | " f"{obs_name} | Events (Raw): {nevents:,} | " - f"Events(Weighted): {ak.sum(weights):,.2f}" + f"Events (Weighted): {ak.sum(weights):,.2f}" ) # Evaluate observable function @@ -1719,10 +1732,9 @@ def _run_traced_analysis_chain( """ info_logger = logger.info if not silent else logger.debug info_logger( - _banner( - "📊 Starting histogram collection and p-value calculation..." - ) - ) + log_banner( + "📊 Starting histogram collection and p-value calculation..." 
+ )) histograms_by_process = defaultdict(dict) # ------------------------------------------------------------------------- @@ -1731,11 +1743,6 @@ def _run_traced_analysis_chain( for dataset_name, dataset_files in processed_data_events.items(): process_name = dataset_name.split("___")[1] - info_logger( - f" ⏳ Processing dataset: {dataset_name} " - f"(process: {process_name}, files: {len(dataset_files)})" - ) - # Ensure process histogram container exists if process_name not in histograms_by_process: histograms_by_process[process_name] = defaultdict( @@ -1745,16 +1752,11 @@ def _run_traced_analysis_chain( # --------------------------------------------------------------------- # Loop over files in the dataset # --------------------------------------------------------------------- - info_logger(" 🔍 Collecting histograms for dataset...") for file_key, variations in dataset_files.items(): for variation_name, ( processed_data, metadata, ) in variations.items(): - logger.debug( - f" • Collecting histograms for file: {file_key} " - f"({variation_name})" - ) # Build histograms for this file and variation file_histograms = self._collect_histograms( @@ -1771,7 +1773,7 @@ def _run_traced_analysis_chain( # Compute statistical p-value from histograms # ------------------------------------------------------------------------- info_logger( - " ✅ Histogram collection complete. Starting p-value calculation..." + "✅ Histogram collection complete. Starting p-value calculation..." ) pvalue, aux = self._calculate_pvalue( histograms_by_process, params["fit"], silent=silent @@ -1788,21 +1790,18 @@ def _run_traced_analysis_chain( def _prepare_data( self, params: dict[str, Any], - fileset: dict[str, Any], read_from_cache: bool = False, run_and_cache: bool = True, cache_dir: Optional[str] = "/tmp/gradients_analysis/", recreate_fit_params: bool = False, ) -> dict[str, dict[str, dict[str, Any]]]: """ - Run full analysis on all datasets in fileset with caching support. 
+ Run full analysis on processed datasets with caching support. Parameters ---------- params : dict Analysis parameters. - fileset : dict - Dictionary mapping dataset names to file and metadata. read_from_cache : bool Read preprocessed events from cache. run_and_cache : bool @@ -1820,21 +1819,30 @@ def _prepare_data( all_channel_names = { f"Channel: {c.name}" for c in config.channels if c.use_in_diff } - summary_data = [] - logger.info(_banner("Preparing and Caching Data")) + summary_data = [] + logger.info(log_banner("Processing skimmed data")) # Prepare dictionary to collect MVA training data mva_data: dict[str, dict[str, list[Tuple[dict, int]]]] = defaultdict( lambda: defaultdict(list) ) - # Loop over datasets in the fileset - for dataset, content in fileset.items(): - metadata = content["metadata"] - metadata["dataset"] = dataset - process_name = metadata["process"] + # Use processed datasets from skimming + if not self.processed_datasets: + raise ValueError("No processed datasets available for analysis") + # Loop over processed datasets + for dataset, events_list in self.processed_datasets.items(): + # Get metadata from first event in the list + if not events_list: + continue + _, metadata = events_list[0] + process_name = metadata["process"] + logger.info( + f"Processing dataset: {dataset} (process: {process_name}, " + f"files: {len(events_list)})" + ) # Skip datasets not explicitly requested in config if (req := config.general.processes) and process_name not in req: logger.info( @@ -1844,225 +1852,115 @@ def _prepare_data( dataset_stats = defaultdict(int) - # Loop over ROOT files associated with the dataset - for idx, (file_path, tree) in enumerate(content["files"].items()): - # Honour file limit if set in configuration - if ( - config.general.max_files != -1 - and idx >= config.general.max_files - ): - logger.info( - f"Reached max files limit ({config.general.max_files})" - ) - break - - # Determine output directory for preprocessed files - output_dir = ( - 
f"output/{dataset}/file__{idx}/" - if not config.general.preprocessed_dir - else f"{config.general.preprocessed_dir}/{dataset}/file__{idx}/" - ) - - # Preprocess ROOT files into skimmed format using uproot or dask - if config.general.run_preprocessing: - if config.general.preprocessor == "uproot": - pre_process_uproot( - file_path, - tree, - output_dir, - config, - is_mc=(dataset != "data"), - ) - elif config.general.preprocessor == "dask": - pre_process_dak( - file_path, - tree, - output_dir + f"/part{idx}.root", - config, - is_mc=(dataset != "data"), - ) - - # Discover skimmed files and summarise retained events - skimmed_files = glob.glob(f"{output_dir}/part*.root") - skimmed_files = [f"{f}:{tree}" for f in skimmed_files] - remaining = sum( - uproot.open(f).num_entries for f in skimmed_files - ) - dataset_stats["Skimmed"] += remaining - - # Loop over skimmed files for further processing and caching - for skimmed in skimmed_files: - cache_key = hashlib.md5(skimmed.encode()).hexdigest() - cache_file = os.path.join( - cache_dir, f"{dataset}__{cache_key}.pkl" + # Loop over events in the processed dataset + for idx, (events, file_metadata) in enumerate(events_list): + + # Count skimmed events + dataset_stats["Skimmed"] += len(events) + + # Run preprocessing pipeline and store processed results + processed_data, stats = self._prepare_data_for_tracing(events, process_name) + all_events[f"{dataset}___{process_name}"][f"file__{idx}"][f"events_{idx}"] = (processed_data, file_metadata) + + dataset_stats["Baseline (Analysis)"] += stats["baseline_analysis"] + dataset_stats["Baseline (MVA)"] += stats["baseline_mva"] + for ch, count in stats["channels"].items(): + ch_name = f"Channel: {ch}" + dataset_stats[ch_name] += count + + # ------------------------------------------------------ + # If MVA training is enabled, collect data for MVA models + # ------------------------------------------------------ + # Helper to extract class name and associated process names + def 
parse_class_entry(entry: Union[str, dict[str, list[str]]]) -> tuple[str, list[str]]: + """ + Parse MVA class entry to extract class name and associated process names. + + Parameters + ---------- + entry : Union[str, dict[str, list[str]]] + MVA class entry, either a string (process name) or a dictionary + mapping class name to list of process names. + + Returns + ------- + tuple[str, list[str]] + A tuple containing: + - class_name: Name of the MVA class + - process_names: List of process names associated with this class + + Raises + ------ + ValueError + If entry is neither a string nor a dictionary. + """ + if isinstance(entry, str): + return entry, [entry] + if isinstance(entry, dict): + return next(iter(entry.items())) + raise ValueError(f"Invalid MVA class type: {type(entry)}. \ + Allowed types are str or dict.") + + # Helper to record MVA data + def record_mva_entry( + mva_data: dict[str, dict[str, list[tuple[dict, int]]]], + cfg_name: str, + class_label: str, + presel_ch: dict[str, Any], + process_name: str + ) -> None: + """ + Record MVA training data for a specific class and process. + + Parameters + ---------- + mva_data : dict[str, dict[str, list[tuple[dict, int]]]] + Nested dictionary storing MVA training data, structured as: + mva_data[config_name][class_name] = [(objects_dict, event_count), ...] + cfg_name : str + Name of the MVA configuration. + class_label : str + Label for the MVA class (e.g., 'signal', 'background'). + presel_ch : dict[str, Any] + Preselection channel data containing 'mva_objects' and 'mva_nevents'. + process_name : str + Name of the physics process being recorded. + + Returns + ------- + None + Modifies mva_data in place by appending new training data. + """ + nevents = presel_ch["mva_nevents"] + logger.debug( + f"Adding {nevents} events from process '{process_name}' to MVA class '{class_label}'." 
) - - # Handle caching: process and cache, read from cache, or skip - if run_and_cache: - logger.info( - f"Processing {skimmed} and caching results" - ) - events = NanoEventsFactory.from_root( - skimmed, schemaclass=NanoAODSchema, delayed=False - ).events() - with open(cache_file, "wb") as f: - cloudpickle.dump(events, f) - elif read_from_cache: - logger.info(f"Reading cached events for {skimmed}") - if os.path.exists(cache_file): - with open(cache_file, "rb") as f: - events = cloudpickle.load(f) - else: - logger.warning( - f"Cache file not found: {cache_file}" - ) - logger.info( - f"Processing {skimmed} and caching results" - ) - events = NanoEventsFactory.from_root( - skimmed, - schemaclass=NanoAODSchema, - delayed=False, - ).events() - with open(cache_file, "wb") as f: - cloudpickle.dump(events, f) - else: - logger.info( - f"Processing {skimmed} but *not* caching results" - ) - events = NanoEventsFactory.from_root( - skimmed, schemaclass=NanoAODSchema, delayed=False - ).events() - - # Run preprocessing pipeline and store processed results - processed_data, stats = self._prepare_data_for_tracing( - events, process_name + mva_data[cfg_name][class_label].append( + (presel_ch["mva_objects"], nevents) ) - all_events[f"{dataset}___{process_name}"][f"file__{idx}"][ - skimmed - ] = (processed_data, metadata) - - dataset_stats["Baseline (Analysis)"] += stats[ - "baseline_analysis" - ] - dataset_stats["Baseline (MVA)"] += stats["baseline_mva"] - for ch, count in stats["channels"].items(): - ch_name = f"Channel: {ch}" - dataset_stats[ch_name] += count - - # ------------------------------------------------------ - # If MVA training is enabled, collect data for MVA models - # ------------------------------------------------------ - # Helper to extract class name and associated process names - def parse_class_entry( - entry: Union[str, dict[str, list[str]]], - ) -> tuple[str, list[str]]: - """ - Parse MVA class entry to extract class name and associated - process names. 
- - Parameters - ---------- - entry : Union[str, dict[str, list[str]]] - MVA class entry, either a string (process name) or a - dictionary mapping class name to list of process names. - - Returns - ------- - tuple[str, list[str]] - A tuple containing: - - class_name: Name of the MVA class - - process_names: List of process names associated with this - class - - Raises - ------ - ValueError - If entry is neither a string nor a dictionary. - """ - if isinstance(entry, str): - return entry, [entry] - if isinstance(entry, dict): - return next(iter(entry.items())) - raise ValueError( - f"Invalid MVA class type: {type(entry)}. \ - Allowed types are str or dict." - ) - # Helper to record MVA data - def record_mva_entry( - mva_data: dict[str, dict[str, list[tuple[dict, int]]]], - cfg_name: str, - class_label: str, - presel_ch: dict[str, Any], - process_name: str, - ) -> None: - """ - Record MVA training data for a specific class and process. - - Parameters - ---------- - mva_data : dict[str, dict[str, list[tuple[dict, int]]]] - Nested dictionary storing MVA training data, structured as: - mva_data[config_name][class_name] - = [(objects_dict, event_count), ...] - cfg_name : str - Name of the MVA configuration. - class_label : str - Label for the MVA class (e.g., 'signal', 'background'). - presel_ch : dict[str, Any] - Preselection channel data containing 'mva_objects' and - 'mva_nevents'. - process_name : str - Name of the physics process being recorded. - - Returns - ------- - None - Modifies mva_data in place by appending new training data. - """ - nevents = presel_ch["mva_nevents"] - logger.debug( - f"Adding {nevents} events from process '{process_name}' " - f"to MVA class '{class_label}'." 
- ) - mva_data[cfg_name][class_label].append( - (presel_ch["mva_objects"], nevents) - ) + # Collect training data for MVA, if enabled + if config.mva and config.general.run_mva_training: + nominal = processed_data.get("nominal", {}) + presel_ch = nominal.get("__presel") + if presel_ch: + for mva_cfg in config.mva: + seen = set() # track classes to avoid duplicates + # iterate training and plot classes in order + for entry in chain(mva_cfg.classes, mva_cfg.plot_classes): + class_name, proc_names = parse_class_entry(entry) + # fallback default + if not class_name or not proc_names: + class_name = process_name + proc_names = [process_name] + # skip duplicates + if class_name in seen: + continue + seen.add(class_name) + # record only if this process applies + if process_name in proc_names: + record_mva_entry(mva_data, mva_cfg.name, class_name, presel_ch, process_name) - # Collect training data for MVA, if enabled - if config.mva and config.general.run_mva_training: - nominal = processed_data.get("nominal", {}) - presel_ch = nominal.get("__presel") - if presel_ch: - for mva_cfg in config.mva: - seen = ( - set() - ) # track classes to avoid duplicates - # iterate training and plot classes in order - for entry in chain( - mva_cfg.classes, mva_cfg.plot_classes - ): - class_name, proc_names = parse_class_entry( - entry - ) - # fallback default - if not class_name or not proc_names: - class_name = process_name - proc_names = [process_name] - # skip duplicates - if class_name in seen: - continue - seen.add(class_name) - # record only if this process applies - if process_name in proc_names: - record_mva_entry( - mva_data, - mva_cfg.name, - class_name, - presel_ch, - process_name, - ) row = {"Dataset": dataset, "Process": process_name} row.update(dataset_stats) @@ -2085,7 +1983,7 @@ def record_mva_entry( + tabulate( [formatted_row], headers=headers, - tablefmt="grid", + tablefmt="rounded_outline", stralign="right", ) + "\n" @@ -2110,9 +2008,9 @@ def record_mva_entry( 
table_data.append(formatted_row) logger.info( - "📊 Data Preparation Summary\n" + "📊 Data Processing Summary\n" + tabulate( - table_data, headers=headers, tablefmt="grid", stralign="right" + table_data, headers=headers, tablefmt="rounded_outline", stralign="right" ) + "\n" ) @@ -2130,7 +2028,7 @@ def record_mva_entry( self.config.general.run_mva_training and (mva_cfg := self.config.mva) is not None ): - logger.info(_banner("Executing MVA Pre-training")) + logger.info(log_banner("Executing MVA Pre-training")) models, nets = self._run_mva_training(mva_data) # Save trained models and attach to processed data @@ -2165,7 +2063,7 @@ def record_mva_entry( # Cut Optimisation via Gradient Ascent # ------------------------------------------------------------------------- def run_analysis_optimisation( - self, fileset: dict[str, dict[str, Any]] + self ) -> Tuple[dict[str, jnp.ndarray], jnp.ndarray]: """ Perform gradient-based optimisation of analysis selection cuts and @@ -2189,7 +2087,7 @@ def run_analysis_optimisation( - Final JAX scalar p-value """ # Log a summary of the configuration being used for this run - self._log_config_summary(fileset) + self._log_config_summary() cache_dir = "/tmp/gradients_analysis/" # --------------------------------------------------------------------- # If not just plotting, begin gradient-based optimisation chain @@ -2215,7 +2113,6 @@ def run_analysis_optimisation( processed_data, mva_models, mva_nets, mva_data = ( self._prepare_data( all_parameters, - fileset, read_from_cache=read_from_cache, run_and_cache=run_and_cache, cache_dir=cache_dir, @@ -2241,9 +2138,7 @@ def run_analysis_optimisation( # --------------------------------------------------------------------- # 3. 
Run initial traced analysis to compute KDE histograms # --------------------------------------------------------------------- - logger.info( - _banner("Running initial p-value computation (traced)") - ) + logger.info(log_banner("Running initial p-value computation (traced)")) initial_pvalue, (mle_parameters, mle_parameters_uncertainties) = self._run_traced_analysis_chain( all_parameters, processed_data ) @@ -2265,7 +2160,7 @@ def run_analysis_optimisation( # Collect relevant processes and systematics # ---------------------------------------------------------------------- processes, systematics = infer_processes_and_systematics( - fileset, self.config.systematics, self.config.corrections + self.processed_datasets, self.config.systematics, self.config.corrections ) logger.info(f"Processes: {processes}") logger.info(f"Systematics: {systematics}") @@ -2273,9 +2168,7 @@ def run_analysis_optimisation( # ---------------------------------------------------------------------- # Compute gradients to seed optimiser # ---------------------------------------------------------------------- - logger.info( - _banner("Computing parameter gradients before optimisation") - ) + logger.info(log_banner("Computing parameter gradients before optimisation")) (_, _), gradients = jax.value_and_grad( self._run_traced_analysis_chain, @@ -2286,7 +2179,7 @@ def run_analysis_optimisation( # ---------------------------------------------------------------------- # Prepare for optimisation # ---------------------------------------------------------------------- - logger.info(_banner("Preparing for parameter optimisation")) + logger.info(log_banner("Preparing for parameter optimisation")) # Define objective for optimiser (p-value to minimise) def objective( @@ -2346,7 +2239,7 @@ def objective( )(all_parameters) # Set up optimisation loop - logger.info(_banner("Beginning parameter optimisation")) + logger.info(log_banner("Beginning parameter optimisation")) initial_params = all_parameters.copy() 
pval_history = [] aux_history = { @@ -2444,7 +2337,7 @@ def optimise_and_log( ) # Log final summary table comparing initial and final states - logger.info(_banner("Optimisation results")) + logger.info(log_banner("Optimisation results")) _log_parameter_update( step="", old_p_value=float(initial_pvalue), @@ -2457,16 +2350,14 @@ def optimise_and_log( # ---------------------------------------------------------------------- # Prepare post-optimisation histograms # ---------------------------------------------------------------------- - logger.info( - _banner( - "Running analysis chain with optimised parameters (untraced)" - ) - ) + logger.info(log_banner( + "Running analysis chain with optimised parameters (untraced)" + )) # Run the traced analysis chain with final parameters _ = self._run_traced_analysis_chain(final_params, processed_data) logger.info( - _banner("Re-computing NN scores/process for MVA models") + log_banner("Re-computing NN scores/process for MVA models") ) # Compute MVA scores with optimised parameters @@ -2519,7 +2410,7 @@ def optimise_and_log( with open(path, "wb") as f: pickle.dump(jax.tree.map(np.array, optimised_nn_params), f) - logger.info(_banner("Making plots and summaries")) + logger.info(log_banner("Making plots and summaries")) # --------------------------------------------------------------------- # 4. 
Reload results and generate summary plots # --------------------------------------------------------------------- @@ -2539,7 +2430,6 @@ def optimise_and_log( final_mva_scores = results["final_mva_scores"] initial_mva_scores = results["initial_mva_scores"] - logger.info(_banner("Generating parameter evolution plots")) # Generate optimisation progress plots if self.config.jax.explicit_optimisation: logger.info("Generating parameter history plots") diff --git a/analysis/nondiff.py b/analysis/nondiff.py index 87b0929..a6e38f0 100644 --- a/analysis/nondiff.py +++ b/analysis/nondiff.py @@ -4,7 +4,7 @@ import warnings from collections import defaultdict from typing import Any, Literal, Optional - +from typing import Any, Literal, Optional, Dict, List, Tuple import awkward as ak import cabinetry import hist @@ -20,7 +20,6 @@ save_histograms_to_pickle, save_histograms_to_root, ) -from utils.preproc import pre_process_dak, pre_process_uproot from utils.stats import get_cabinetry_rebinning_router # ----------------------------- @@ -44,18 +43,19 @@ # ----------------------------- class NonDiffAnalysis(Analysis): - def __init__(self, config: dict[str, Any]) -> None: + def __init__(self, config: dict[str, Any], processed_datasets: Optional[Dict[str, List[Tuple[Any, Dict[str, Any]]]]] = None) -> None: """ - Initialize ZprimeAnalysis with configuration for systematics, corrections, - and channels. + Initialize ZprimeAnalysis with configuration and processed datasets. Parameters ---------- config : dict Configuration dictionary with 'systematics', 'corrections', 'channels', and 'general'. 
+ processed_datasets : Optional[Dict[str, List[Tuple[Any, Dict[str, Any]]]]], optional + Pre-processed datasets from skimming, by default None """ - super().__init__(config) + super().__init__(config, processed_datasets) self.nD_hists_per_region = self._init_histograms() def _prepare_dirs(self): @@ -382,77 +382,30 @@ def run_fit( return data, results, prefit_prediction, postfit_prediction - def run_analysis_chain(self, fileset): + def run_analysis_chain(self): + """ + Run the complete non-differentiable analysis chain using pre-processed datasets. + """ config = self.config - for dataset, content in fileset.items(): - metadata = content["metadata"] - metadata["dataset"] = dataset - process_name = metadata["process"] - if (req_processes := config.general.processes) is not None: - if process_name not in req_processes: - continue - os.makedirs( - f"{config.general.output_dir}/{dataset}", exist_ok=True - ) + if not self.processed_datasets: + raise ValueError("No processed datasets provided to analysis") + # Loop over processed datasets + for dataset_name, events_list in self.processed_datasets.items(): + os.makedirs(f"{config.general.output_dir}/{dataset_name}", exist_ok=True) logger.info("========================================") - logger.info(f"🚀 Processing dataset: {dataset}") + logger.info(f"🚀 Processing dataset: {dataset_name}") - for idx, (file_path, tree) in enumerate(content["files"].items()): - output_dir = ( - f"output/{dataset}/file__{idx}/" - if not config.general.preprocessed_dir - else f"{config.general.preprocessed_dir}/{dataset}/file__{idx}/" - ) - if ( - config.general.max_files != -1 - and idx >= config.general.max_files - ): - continue - if config.general.run_preprocessing: - logger.info(f"🔍 Preprocessing input file: {file_path}") - logger.info(f"➡️ Writing to: {output_dir}") - if config.general.preprocessor == "uproot": - pre_process_uproot( - file_path, - tree, - output_dir, - config, - is_mc=(dataset != "data"), - ) - elif 
config.general.preprocessor == "dask": - pre_process_dak( - file_path, - tree, - output_dir + f"/part{idx}.root", - config, - is_mc=(dataset != "data"), - ) - - skimmed_files = glob.glob(f"{output_dir}/part*.root") - skimmed_files = [f"{f}:{tree}" for f in skimmed_files] - remaining = sum( - uproot.open(f).num_entries for f in skimmed_files - ) - logger.info( - f"✅ Events retained after filtering: {remaining:,}" - ) - if config.general.run_histogramming: - for skimmed in skimmed_files: - logger.info(f"📘 Processing skimmed file: {skimmed}") - logger.info( - "📈 Processing for non-differentiable analysis" - ) - events = NanoEventsFactory.from_root( - skimmed, schemaclass=NanoAODSchema, delayed=False - ).events() - self.process(events, metadata) - logger.info( - "📈 Non-differentiable histogram-filling complete." - ) - - logger.info(f"🏁 Finished dataset: {dataset}\n") + # Process each (events, metadata) tuple + if config.general.run_histogramming: + for events, metadata in events_list: + logger.info(f"📘 Processing events for {dataset_name}") + logger.info("📈 Processing for non-differentiable analysis") + self.process(events, metadata) + logger.info("📈 Non-differentiable histogram-filling complete.") + + logger.info(f"🏁 Finished dataset: {dataset_name}\n") # Report end of processing logger.info("✅ All datasets processed.") diff --git a/environment.yml b/environment.yml index 1da87cd..cca1cdb 100644 --- a/environment.yml +++ b/environment.yml @@ -1,295 +1,39 @@ -name: zprime_diff_analysis +name: zprime_env channels: - - defaults - conda-forge + - defaults dependencies: - - bzip2=1.0.8=h80987f9_6 - - ca-certificates=2025.2.25=hca03da5_0 - - libffi=3.4.4=hca03da5_1 - - ncurses=6.4=h313beb8_0 - - openssl=3.0.16=h02f6b3c_0 - - pip=25.1=pyhc872135_2 - - python=3.11.11=hb885b13_0 - - readline=8.2=h1a28f6b_0 - - setuptools=78.1.1=py311hca03da5_0 - - sqlite=3.45.3=h80987f9_0 - - tk=8.6.14=h6ba3021_0 - - wheel=0.45.1=py311hca03da5_0 - - xz=5.6.4=h80987f9_1 - - 
zlib=1.2.13=h18a0788_1 + - python=3.11 + - xrootd + - openssl>=3.5 + - jax + - jaxlib + - numpy + - scipy + - pandas + - matplotlib + - uproot + - vector + - coffea + - tensorflow + - scikit-learn + - pip - pip: - - absl-py==2.2.2 - - aiofile==3.9.0 - - aiohappyeyeballs==2.4.0 - - aiohttp==3.10.5 - - aiohttp-retry==2.9.0 - - aiosignal==1.3.1 - - annotated-types==0.7.0 - - antlr4-python3-runtime==4.9.3 - - anyio==4.4.0 - - appnope==0.1.4 - - argon2-cffi==23.1.0 - - argon2-cffi-bindings==21.2.0 - - arrow==1.3.0 - - asttokens==2.4.1 - - astunparse==1.6.3 - - async-lru==2.0.4 - - async-timeout==4.0.3 - - attrs==24.2.0 - - awkward==2.8.2 - - awkward-cpp==45 - - babel==2.16.0 - - beautifulsoup4==4.12.3 - - black==25.1.0 - - bleach==6.1.0 - - bokeh==3.6.0 - - boost-histogram==1.5.0 - - cabinetry==0.6.0 - - cachetools==5.5.0 - - caio==0.9.17 - - ccorp-yaml-include-relative-path==0.0.4 - - celluloid==0.2.0 - - certifi==2024.8.30 - - cffi==1.17.0 - - cfgv==3.4.0 - - charset-normalizer==3.3.2 - - chex==0.1.89 - - click==8.1.7 - - cloudpickle==3.0.0 - - coffea==2024.8.2 - - colorama==0.4.6 - - comm==0.2.2 - - contourpy==1.3.0 - - correctionlib==2.7.0 - - cramjam==2.8.3 - - cycler==0.12.1 - - dask==2024.8.1 - - dask-awkward==2025.5.0 - - dask-histogram==2025.2.0 - - debugpy==1.8.5 - - decorator==5.1.1 - - defusedxml==0.7.1 - - distlib==0.3.9 - - distributed==2024.8.1 - - docdantic==0.3.1 - - equinox==0.12.2 - - etils==1.12.2 - - evermore==0.2.10 - - exceptiongroup==1.2.2 - - executing==2.0.1 - - fastjsonschema==2.20.0 - - filelock==3.16.1 - - flake8==7.3.0 - - flatbuffers==25.2.10 - - fonttools==4.53.1 - - fqdn==1.5.1 - - frozenlist==1.4.1 - - fsspec==2024.6.1 - - fsspec-xrootd==0.3.0 - - func-adl==3.3.3 - - func-adl-servicex==2.2 - - gast==0.6.0 - - gitdb==4.0.11 - - gitpython==3.1.43 - - google-api-core==2.24.2 - - google-auth==2.35.0 - - google-auth-oauthlib==1.2.1 - - google-cloud-core==2.4.3 - - google-cloud-storage==3.1.0 - - google-crc32c==1.7.1 - - google-pasta==0.2.0 - 
- google-resumable-media==2.7.2 - - googleapis-common-protos==1.69.2 - - griffe==1.8.0 - - griffe-pydantic==1.1.4 - - grpcio==1.71.0 - - h11==0.14.0 - - h5py==3.13.0 - - hist==2.8.0 - - histoprint==2.4.0 - - httpcore==1.0.5 - - httpx==0.27.2 - - identify==2.6.1 - - idna==3.8 - - iminuit==2.30.1 - - importlib-metadata==8.4.0 - - iniconfig==2.0.0 - - ipykernel==6.29.5 - - ipython==8.27.0 - - ipywidgets==8.1.5 - - isoduration==20.11.0 - - jax==0.6.0 - - jaxlib==0.6.0 - - jaxopt==0.8.5 - - jaxtyping==0.3.2 - - jedi==0.19.1 - - jinja2==3.1.4 - - joblib==1.5.0 - - json5==0.9.25 - - jsonpatch==1.33 - - jsonpointer==3.0.0 - - jsonschema==4.23.0 - - jsonschema-specifications==2023.12.1 - - jupyter==1.1.1 - - jupyter-client==8.6.2 - - jupyter-console==6.6.3 - - jupyter-core==5.7.2 - - jupyter-events==0.10.0 - - jupyter-lsp==2.2.5 - - jupyter-server==2.14.2 - - jupyter-server-mathjax==0.2.6 - - jupyter-server-terminals==0.5.3 - - jupyterlab==4.2.5 - - jupyterlab-pygments==0.3.0 - - jupyterlab-server==2.27.3 - - jupyterlab-widgets==3.0.13 - - keras==3.9.2 - - kiwisolver==1.4.5 - - lark==1.2.2 - - libclang==18.1.1 - - llvmlite==0.43.0 - - locket==1.0.0 - - lz4==4.3.3 - - make-it-sync==2.0.0 - - markdown==3.8 - - markdown-it-py==3.0.0 - - markupsafe==2.1.5 - - matplotlib==3.9.2 - - matplotlib-inline==0.1.7 - - mccabe==0.7.0 - - mdurl==0.1.2 - - miniopy-async==1.21.1 - - mistune==3.0.2 - - ml-dtypes==0.5.1 - - mplhep==0.3.51 - - mplhep-data==0.0.3 - - msgpack==1.1.0 - - multidict==6.0.5 - - mypy==1.13.0 - - mypy-extensions==1.0.0 - - namex==0.0.9 - - nbclient==0.10.0 - - nbconvert==7.16.4 - - nbdime==4.0.2 - - nbformat==5.10.4 - - neos==0.1.dev203+gfd52e3f.d20250515 - - nest-asyncio==1.6.0 - - nodeenv==1.9.1 - - notebook==7.2.2 - - notebook-shim==0.2.4 - - numba==0.60.0 - - numpy==1.26.0 - - oauthlib==3.2.2 - - omegaconf==2.3.0 - - opt-einsum==3.3.0 - - optax==0.2.4 - - optree==0.15.0 - - overrides==7.7.0 - - packaging==24.1 - - pandas==2.2.2 - - pandocfilters==1.5.1 - - 
parso==0.8.4 - - partd==1.4.2 - - pathspec==0.12.1 - - pexpect==4.9.0 - - pillow==10.4.0 - - platformdirs==4.2.2 - - pluggy==1.5.0 - - pre-commit==4.0.1 - - prometheus-client==0.20.0 - - prompt-toolkit==3.0.47 - - propcache==0.3.1 - - proto-plus==1.26.1 - - protobuf==5.29.4 - - psutil==6.0.0 - - ptyprocess==0.7.0 - - pure-eval==0.2.3 - - pyarrow==17.0.0 - - pyarrow-hotfix==0.6 - - pyasn1==0.6.1 - - pyasn1-modules==0.4.1 - - pycodestyle==2.14.0 - - pycparser==2.22 - - pydantic==2.8.2 - - pydantic-core==2.20.1 - - pyflakes==3.4.0 - - pygments==2.18.0 - - pyhf==0.7.6 - - pyparsing==3.1.4 - - pytest==8.3.3 - - python-dateutil==2.9.0.post0 - - python-json-logger==2.0.7 - - pytz==2024.1 - - pyyaml==6.0.2 - - pyzmq==26.2.0 - - qastle==0.18.0 - - referencing==0.35.1 - - relaxed==0.4.0 - - requests==2.32.3 - - requests-oauthlib==2.0.0 - - rfc3339-validator==0.1.4 - - rfc3986-validator==0.1.1 - - rich==13.8.0 - - rpds-py==0.20.0 - - rsa==4.9 - - ruamel-yaml==0.18.6 - - ruamel-yaml-clib==0.2.12 - - scikit-learn==1.6.1 - - scipy==1.14.1 - - send2trash==1.8.3 - - servicex==3.0.0 - - shellingham==1.5.4 - - six==1.16.0 - - smmap==5.0.1 - - sniffio==1.3.1 - - sortedcontainers==2.4.0 - - soupsieve==2.6 - - stack-data==0.6.3 - - tabulate==0.9.0 - - tblib==3.0.0 - - tenacity==9.0.0 - - tensorboard==2.19.0 - - tensorboard-data-server==0.7.2 - - tensorflow==2.19.0 - - tensorflow-io-gcs-filesystem==0.37.1 - - tensorflow-metal==1.2.0 - - termcolor==3.1.0 - - terminado==0.18.1 - - threadpoolctl==3.6.0 - - tinycss2==1.3.0 - - tinydb==4.8.2 - - toml==0.10.2 - - tomli==2.0.1 - - toolz==0.12.1 - - tornado==6.4.1 - - tqdm==4.66.5 - - traitlets==5.14.3 - - treescope==0.1.9 - - typer==0.12.5 - - types-python-dateutil==2.9.0.20240821 - - types-pyyaml==6.0.12.20240917 - - typing-extensions==4.12.2 - - typing-inspection==0.4.0 - - tzdata==2024.1 - - uhi==0.4.0 - - uproot==5.3.12 - - uri-template==1.3.0 - - urllib3==2.2.2 - - vector==1.5.1 - - virtualenv==20.27.1 - - wadler-lindig==0.1.6 - - 
wcwidth==0.2.13 - - webcolors==24.8.0 - - webencodings==0.5.1 - - websocket-client==1.8.0 - - werkzeug==3.1.3 - - widgetsnbextension==4.0.13 - - wrapt==1.17.2 - - xgboost==2.1.2 - - xrootd==5.8.2 - - xxhash==3.5.0 - - xyzservices==2024.9.0 - - yarl==1.9.4 - - zict==3.0.0 - - zipp==3.20.1 -prefix: /Users/moaly/miniforge3/envs/lino_zprime_jax + - wadler-lindig + - tensorflow-metal + - tensorflow-io-gcs-filesystem + - stack-data + - relaxed + - pydantic-core + - pyasn1-modules + - notebook-shim + - mypy-extensions + - msgpack + - mplhep-data + - jupyter-events + - jupyter-console + - func-adl-servicex + - fsspec-xrootd + - fastjsonschema + - docdantic + - awkward-cpp diff --git a/cabinetry/cabinetry_config.yaml b/nondiff_stats/cabinetry/cabinetry_config.yaml similarity index 100% rename from cabinetry/cabinetry_config.yaml rename to nondiff_stats/cabinetry/cabinetry_config.yaml diff --git a/nondiff_stats/workspace.json b/nondiff_stats/workspace.json new file mode 100644 index 0000000..a0dca0e --- /dev/null +++ b/nondiff_stats/workspace.json @@ -0,0 +1,703 @@ +{ + "channels": [ + { + "name": "Zprime_channel", + "samples": [ + { + "data": [ + 4.376100696623325, + 89.37979253008962, + 168.2321730069816, + 330.14789878204465, + 1158.7584372907877, + 2007.6806884668767, + 2278.544808000326, + 2149.986529044807, + 1804.0268701985478, + 1390.8569100871682, + 1040.8927062638104, + 776.5101698376238, + 557.7464189752936, + 411.14704563841224, + 296.5014641806483, + 220.6215294599533, + 158.77814414352179, + 121.29230044037104, + 90.7421635016799, + 69.60477145761251, + 50.07745419815183, + 40.25186961516738, + 30.426285032182932, + 21.963071420788765 + ], + "modifiers": [ + { + "data": [ + 0.42903558937297065, + 1.9333067060625753, + 2.6508680388924533, + 3.714410241307777, + 6.94945446232057, + 9.165320023097497, + 9.791526994952111, + 9.535613798829688, + 8.768543934421091, + 7.731463461596482, + 6.720416417875934, + 5.822210664512438, + 4.946157170303136, + 
4.274236778905779, + 3.641423914436692, + 3.145719230489383, + 2.6614555116243857, + 2.330991006449778, + 2.0267023115609764, + 1.7795258306496307, + 1.5330756107447898, + 1.348539435985825, + 1.1756874130471262, + 0.999379069499909 + ], + "name": "staterror_Zprime_channel", + "type": "staterror" + }, + { + "data": { + "hi": 0.9875060351244633, + "lo": 0.9818689030959243 + }, + "name": "MU_ID_SF", + "type": "normsys" + }, + { + "data": { + "hi_data": [ + 4.374668206158063, + 89.32000474456898, + 168.11788148592262, + 330.0625335668744, + 1158.5333685353012, + 2007.665907733479, + 2278.554099171746, + 2150.050481951117, + 1804.095648420421, + 1390.9630716327085, + 1041.009864969114, + 776.586013872833, + 557.7426069309979, + 411.1514628103685, + 296.5373443674536, + 220.62475015884132, + 158.79764772945933, + 121.28426760591142, + 90.74490836897819, + 69.60596247584438, + 50.08547670328651, + 40.24957748850879, + 30.4268460625119, + 21.961207277061575 + ], + "lo_data": [ + 4.377226123585218, + 89.37385422691521, + 168.20146469455793, + 330.1748172780924, + 1158.7946358133256, + 2007.798492859975, + 2278.480637826969, + 2149.9321919973536, + 1803.9753042992293, + 1390.821306304611, + 1040.9698403391017, + 776.5074528290054, + 557.708321818791, + 411.13356855436746, + 296.53624240291924, + 220.612502132975, + 158.8007538420012, + 121.28203355584328, + 90.74186597992887, + 69.6037309245273, + 50.088479606688985, + 40.24506956681937, + 30.422804171839008, + 21.963005120047377 + ] + }, + "name": "MU_ID_SF", + "type": "histosys" + } + ], + "name": "ttbar_semilep" + }, + { + "data": [ + 1.804945733398199, + 6.637542374432087, + 8.617160275578499, + 9.548745170235634, + 16.12806348875165, + 22.94027803093195, + 20.37841957062483, + 14.84713425859809, + 12.40172391012311, + 7.802023492753506, + 5.531285312026739, + 5.414837200194597, + 3.5516674108803272, + 3.4934433549642563, + 1.7467216774821281, + 1.3391532860696316, + 1.2227051742374897, + 0.6404646150767803, + 
0.4657924473285675, + 0.5822405591607094, + 0.40756839141249657, + 0.34934433549642563, + 0.2911202795803547, + 0.17467216774821281 + ], + "modifiers": [ + { + "data": [ + 0.3241778235889223, + 0.627092273695713, + 0.7083262113765286, + 0.7501644846930212, + 0.9725334707667654, + 1.1615662673960803, + 1.1047237878467124, + 0.9513889128604758, + 0.8576939017784555, + 0.6889163201902571, + 0.5966190343664572, + 0.5614924611431136, + 0.4694173459467109, + 0.48012786498765864, + 0.3293649981315078, + 0.27923276277741543, + 0.27923276277741543, + 0.19310734724627984, + 0.1841206313077857, + 0.1841206313077857, + 0.1540463722754427, + 0.14261922774964997, + 0.1301929469540834, + 0.10084702306936613 + ], + "name": "staterror_Zprime_channel", + "type": "staterror" + }, + { + "data": { + "hi": 0.9881480100489056, + "lo": 0.9819572685300455 + }, + "name": "MU_ID_SF", + "type": "normsys" + }, + { + "data": { + "hi_data": [ + 1.8030771142903745, + 6.625016187113526, + 8.619667154408743, + 9.55319327853785, + 16.120626443106097, + 22.94148229030818, + 20.378263549162273, + 14.854583657128693, + 12.407330421132997, + 7.802848031695636, + 5.5272897770938405, + 5.417252299830831, + 3.5505981534023046, + 3.4898293867538634, + 1.746930365842573, + 1.3416595542260399, + 1.2231200895343741, + 0.6411464299349079, + 0.46609280551639215, + 0.5839688540347643, + 0.4076071312984296, + 0.34937117982057764, + 0.29121851673339405, + 0.17487984617957364 + ], + "lo_data": [ + 1.8052593604417748, + 6.6330453719634255, + 8.625839812658755, + 9.55678032520422, + 16.121894295214684, + 22.93834517597065, + 20.371479935096275, + 14.84733619735821, + 12.408289336824973, + 7.801686994294314, + 5.525425314365993, + 5.41902364594751, + 3.549018329153717, + 3.4863865866040524, + 1.74722794017627, + 1.3423036556270214, + 1.2217226114666975, + 0.6413756059386417, + 0.4664259831032435, + 0.5841713510231675, + 0.4079360052559434, + 0.3497099744795933, + 0.2913612531900434, + 0.17500745572716342 + ] + }, + 
"name": "MU_ID_SF", + "type": "histosys" + } + ], + "name": "ttbar_had" + }, + { + "data": [ + 0.6249373815953732, + 14.603799864649773, + 49.962099086493254, + 174.5219866707921, + 351.0503512509167, + 432.9829311221838, + 387.16515361890197, + 315.72494347020984, + 237.93668518215418, + 179.0938969887793, + 135.38117171823978, + 102.12792472913861, + 77.42645243555307, + 55.9154499322176, + 43.94296535849571, + 35.720105074346066, + 26.247370027005672, + 19.965104769915342, + 14.505125541239977, + 13.25525077804923, + 8.946471989154816, + 7.137442726641893, + 5.196847699582577, + 4.3416702300310135 + ], + "modifiers": [ + { + "data": [ + 0.14337046802184983, + 0.6992816305460412, + 1.2945195433694927, + 2.410742945033199, + 3.419919127166374, + 3.8071754315724338, + 3.607727824325228, + 3.2609006701395447, + 2.8374468506237847, + 2.465103250887817, + 2.1397140185032275, + 1.8568376532252389, + 1.632025449527137, + 1.3814405277371336, + 1.2262818562870537, + 1.1007581605636167, + 0.9441619806128986, + 0.8262235738144394, + 0.7107900550231163, + 0.666812624293658, + 0.5542964185451869, + 0.5042162447898197, + 0.42377657486820425, + 0.3807459580949904 + ], + "name": "staterror_Zprime_channel", + "type": "staterror" + }, + { + "data": { + "hi": 0.9874676320842061, + "lo": 0.9818716509639374 + }, + "name": "MU_ID_SF", + "type": "normsys" + }, + { + "data": { + "hi_data": [ + 0.6247205423478993, + 14.594507778289225, + 49.92512445214722, + 174.4509440178052, + 351.0061072164685, + 432.98242680695773, + 387.1860905975065, + 315.7581287274557, + 237.9552923545186, + 179.12517507232528, + 135.4070573945022, + 102.14547001079306, + 77.4232184795585, + 55.920341720768484, + 43.94625489571731, + 35.719286791609775, + 26.254608825705617, + 19.962850498520776, + 14.505536943357717, + 13.25598759240812, + 8.949461610113591, + 7.138626182973517, + 5.194581046375133, + 4.344338088062176 + ], + "lo_data": [ + 0.625100473565013, + 14.602408762425496, + 49.95022605288153, + 
174.51661768994487, + 351.0754431074655, + 432.99562110229397, + 387.1503879189443, + 315.7071876740838, + 237.9380076665776, + 179.09364041321157, + 135.3884221856859, + 102.13715479943775, + 77.41655156183276, + 55.9182335706183, + 43.94272840287694, + 35.7157420099855, + 26.250125561533803, + 19.96649561671011, + 14.505234659048552, + 13.253860498965413, + 8.95075654029339, + 7.137075754174638, + 5.194554417747872, + 4.344561205983895 + ] + }, + "name": "MU_ID_SF", + "type": "histosys" + } + ], + "name": "ttbar_lep" + }, + { + "data": [ + 0.0, + 0.6531646437942982, + 2.208318557590246, + 2.6748647317290306, + 3.016998592764139, + 3.29692629724741, + 6.002894107252359, + 10.326221987605095, + 17.759857695549726, + 29.205790501087904, + 43.32658803835511, + 59.18915795907378, + 73.58988320082426, + 83.29404362291098, + 91.22532858327031, + 91.59856552258134, + 92.74937941879034, + 86.34214529395103, + 78.59747880324721, + 58.13165329769254, + 45.037257343530655, + 28.645935092121363, + 16.48463148623705, + 9.455335795879364 + ], + "modifiers": [ + { + "data": [ + 0.0, + 0.14253221054553522, + 0.26207919595972107, + 0.2884380819664127, + 0.3063297951376594, + 0.3202257901750292, + 0.43209777284767714, + 0.5667250574793705, + 0.7432269129100887, + 0.9530949522819949, + 1.1608575533567578, + 1.3568216585413062, + 1.512901813572494, + 1.609565518617113, + 1.6844549669468303, + 1.6878973171999243, + 1.6984673114621622, + 1.6387515075777939, + 1.5635291924064765, + 1.3446461849968903, + 1.1835528465127882, + 0.9439156537832898, + 0.716046634979947, + 0.5423007001510635 + ], + "name": "staterror_Zprime_channel", + "type": "staterror" + }, + { + "data": null, + "name": "zprime_norm", + "type": "normfactor" + }, + { + "data": { + "hi": 0.9873839898155702, + "lo": 0.9819007080821183 + }, + "name": "MU_ID_SF", + "type": "normsys" + }, + { + "data": { + "hi_data": [ + 0.0, + 0.6528271745117055, + 2.206149409539619, + 2.6736587817179043, + 3.0167894424222417, + 
3.2969723591477034, + 6.005864398438007, + 10.328156440067575, + 17.760439084714754, + 29.212023335928492, + 43.33986012254677, + 59.19854708401759, + 73.6040304051511, + 83.30550207813553, + 91.24180200468354, + 91.61270115359362, + 92.73476360661381, + 86.33271528178011, + 78.58122740395005, + 58.12131956915305, + 45.01889462115989, + 28.640920814856646, + 16.479078823274143, + 9.448177177681748 + ], + "lo_data": [ + 0.0, + 0.6531179155801419, + 2.2072507316745056, + 2.674557842559687, + 3.01746937227223, + 3.297177126937015, + 6.005011248316647, + 10.324986055540277, + 17.753556612329426, + 29.20811828127596, + 43.328286211600464, + 59.18207391853604, + 73.58378946066382, + 83.29181578492137, + 91.23800652415576, + 91.60739563860237, + 92.7480066997366, + 86.34653027739672, + 78.59643385348436, + 58.133695349701775, + 45.03128081157491, + 28.64908033858799, + 16.483072637247826, + 9.45170788038963 + ] + }, + "name": "MU_ID_SF", + "type": "histosys" + } + ], + "name": "signal" + }, + { + "data": [ + 0.0, + 12.463685035705566, + 0.0, + 12.463685035705566, + 24.927370071411133, + 62.318424224853516, + 87.24579429626465, + 62.31842517852783, + 87.24579524993896, + 62.31842517852783, + 37.391056060791016, + 62.31842613220215, + 24.927370071411133, + 24.927370071411133, + 0.0, + 12.463685989379883, + 37.3910551071167, + 0.0, + 12.463685035705566, + 12.463685035705566, + 12.463685035705566, + 0.0, + 0.0, + 0.0 + ], + "modifiers": [ + { + "data": [ + 0.0, + 12.463685035705566, + 0.0, + 12.463685035705566, + 17.626312414641404, + 27.869646563488434, + 32.97581066345912, + 27.86964698998454, + 32.97581102391412, + 27.86964698998454, + 21.587736281982096, + 27.869647416480674, + 17.626312414641404, + 17.626312414641404, + 0.0, + 12.463685989379883, + 21.587735731377958, + 0.0, + 12.463685035705566, + 12.463685035705566, + 12.463685035705566, + 0.0, + 0.0, + 0.0 + ], + "name": "staterror_Zprime_channel", + "type": "staterror" + }, + { + "data": { + "hi": 0.9883042741465439, 
+ "lo": 0.9828667479188841 + }, + "name": "MU_ID_SF", + "type": "normsys" + }, + { + "data": { + "hi_data": [ + 0.0, + 12.448813032380366, + 0.0, + 12.448813032380366, + 24.815662046325055, + 62.410371142807875, + 86.97895168872165, + 62.24525461336865, + 87.39115163637007, + 62.34266597126731, + 37.43078297740937, + 62.32840903580751, + 24.98077953466323, + 24.98077953466323, + 0.0, + 12.366849960209475, + 37.5127460369461, + 0.0, + 12.448813032380366, + 12.531966502282865, + 12.448813032380366, + 0.0, + 0.0, + 0.0 + ], + "lo_data": [ + 0.0, + 12.454315655842162, + 0.0, + 12.454315655842162, + 24.831166622064824, + 62.40200868649801, + 87.01303025169656, + 62.25932927220864, + 87.37585663874714, + 62.33505263383078, + 37.41591460108912, + 62.32454590778338, + 24.973846994301656, + 24.973846994301656, + 0.0, + 12.376851913252764, + 37.49337833276115, + 0.0, + 12.454315655842162, + 12.519531338459494, + 12.454315655842162, + 0.0, + 0.0, + 0.0 + ] + }, + "name": "MU_ID_SF", + "type": "histosys" + } + ], + "name": "wjets" + } + ] + } + ], + "measurements": [ + { + "config": { + "parameters": [ + { + "bounds": [ + [ + 0, + 10 + ] + ], + "inits": [ + 1.0 + ], + "name": "zprime_norm" + } + ], + "poi": "zprime_norm" + }, + "name": "CMS_Zprime" + } + ], + "observations": [ + { + "data": [ + 17.0, + 92.0, + 163.0, + 333.0, + 822.0, + 1292.0, + 1435.0, + 1212.0, + 1092.0, + 850.0, + 604.0, + 423.0, + 326.0, + 239.0, + 220.0, + 142.0, + 92.0, + 77.0, + 72.0, + 46.0, + 37.0, + 15.0, + 12.0, + 11.0 + ], + "name": "Zprime_channel" + } + ], + "version": "1.0.0" +} \ No newline at end of file diff --git a/pixi.lock b/pixi.lock index fba5d1c..72ac7d2 100644 --- a/pixi.lock +++ b/pixi.lock @@ -38,27 +38,27 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.7.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/boost-histogram-1.6.1-py312h0a2e395_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h1289d80_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.5-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2025.8.3-hbd8a1cb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cabinetry-0.6.0-pyhff2d567_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.2.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.8.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h35888ee_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.90-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/chex-0.1.91-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2024.11.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2025.7.3-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/correctionlib-2.7.0-py312ha04a795_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cramjam-2.11.0-py312h848b54d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cramjam-2.11.0-py312h848b54d_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.3.0-pyhd8ed1ab_0.conda @@ -71,26 +71,26 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/etils-1.12.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/evermore-0.3.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.59.1-py312h8a5da7c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.59.2-py312h8a5da7c_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/freetype-2.13.3-ha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-xrootd-0.5.1-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hist-2.9.0-pyhb7efba9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hist-base-2.9.0-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/histoprint-2.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/iminuit-2.31.1-py312h8285ef7_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/iminuit-2.31.1-py312h8285ef7_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.7.0-h40b2b14_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.4.0-pyhfa0c392_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.5.0-pyhfa0c392_0.conda - 
conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jax-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/jaxlib-0.7.0-cpu_py312h73730d4_0.conda @@ -98,13 +98,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jaxtyping-0.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.25.1-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.4.1-pyh29332c3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.44-h1423503_1.conda @@ -114,11 +114,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-20.0.0-hcb10f89_8_cpu.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-20.0.0-hcb10f89_8_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-20.0.0-h1bed206_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-34_h59b9bed_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-34_he106b2a_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-35_h59b9bed_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-35_he106b2a_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.14.1-h332b0f4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.24-h86f0d12_0.conda @@ -129,19 +129,19 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.6-h2dba641_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.13.3-ha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.13.3-h48d6fc4_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.1.0-h767d61c_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.1.0-h69a702a_4.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.1.0-h69a702a_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.1.0-hcea5267_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.1.0-h767d61c_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.1.0-h767d61c_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.1.0-h69a702a_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.1.0-h69a702a_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.1.0-hcea5267_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.1.0-h767d61c_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.36.0-hc4361e1_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.36.0-h0121fbd_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.71.0-h8e591d7_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.0-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-34_h7ac8fdf_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-35_h7ac8fdf_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-1.21.0-hd1b1c89_0.conda @@ -152,8 +152,8 @@ 
environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2025.06.26-hba17884_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.50.4-h0c1763c_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.1.0-h8f9b012_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.1.0-h4852527_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.1.0-h8f9b012_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.1.0-h4852527_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-h8261f1e_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.10.0-h202a827_0.conda @@ -164,22 +164,22 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.8-h2cb61b6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/lineax-0.0.7-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312h374181b_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312he100287_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.4-py312hf0f0c11_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.4-py312h5d89b6d_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.5-py312he3d6523_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.6-py312he3d6523_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ml_dtypes-0.5.1-py312hf9745cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ml_dtypes-0.5.1-py312hf79963d_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep_data-0.0.4-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.1-py312h68727a3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.1-py312hd9148b4_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.6.3-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.1.2-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h3f2d84a_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.61.2-py312h7bcfee6_1.conda @@ -193,13 +193,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.1.2-h17f744e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.2-py312hf79963d_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py312h80c1187_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py312h0e488c8_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/prometheus-cpp-1.3.0-ha5d0236_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.51-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-7.0.0-py312h4c3975b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda @@ -228,10 +228,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rich-14.1.0-pyhe01879c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.27.0-py312h868fb18_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.27.1-py312h868fb18_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.22-h96f233e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.7.1-py312h4f0b9e3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.1-py312h4ebe9ca_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.1-py312h7a1785b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scitokens-cpp-1.1.3-h6ac2c77_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda @@ -244,17 +244,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_hd72426e_102.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.2-py312h4c3975b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.2-py312h4c3975b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.9-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.14.1-h4440ef1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.10-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.14.1-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025b-h78e105d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uhi-1.0.0-pyhcf101f3_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h66e93f0_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.3-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h4c3975b_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.4-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/vector-1.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wadler-lindig-0.1.7-pyhe01879c_0.conda @@ -269,7 +269,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312h66e93f0_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.24.0-py312h3fa7853_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb8e6e7a_2.conda - pypi: git+https://github.com/pfackeldey/relaxed.git?branch=fixes_for_zprime#b5f5ca674cf49e6d48f60a53a75bc35db636894d osx-arm64: @@ -301,27 +301,27 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.13.0-h7585a09_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.8.0-h9ca1f76_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.12.0-hcdd55da_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.7.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/boost-histogram-1.6.1-py312hdc12c9d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h5505292_3.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312hd8f9ff3_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312h6b01ec3_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.5-h5505292_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2025.8.3-hbd8a1cb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cabinetry-0.6.0-pyhff2d567_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.2.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.8.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h0fad829_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h429097b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.90-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.91-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2024.11.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2025.7.3-pyhe01879c_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.3-py312ha0dd364_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.3-py312ha0dd364_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/correctionlib-2.7.0-py312h674a3be_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cramjam-2.11.0-py312h82c2aec_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cramjam-2.11.0-py312h82c2aec_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cytoolz-1.0.1-py312hea69d52_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.3.0-pyhd8ed1ab_0.conda @@ -334,15 +334,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/etils-1.12.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/evermore-0.3.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.59.1-py312h6daa0e5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.59.2-py312h6daa0e5_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.13.3-hce30654_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/frozenlist-1.7.0-py312h512c567_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-xrootd-0.5.1-pyhe01879c_1.conda - 
conda: https://conda.anaconda.org/conda-forge/osx-arm64/gflags-2.2.2-hf9b8971_1005.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/glog-0.7.1-heb240a5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hist-2.9.0-pyhb7efba9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hist-base-2.9.0-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/histoprint-2.6.0-pyhd8ed1ab_0.conda @@ -350,11 +350,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-75.1-hfee45f7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/iminuit-2.31.1-py312he360a15_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/iminuit-2.31.1-py312he360a15_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.7.0-h40b2b14_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.4.0-pyhfa0c392_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.5.0-pyhfa0c392_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jax-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jaxlib-0.7.0-cpu_py312h1f4f324_0.conda @@ -362,12 +362,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jaxtyping-0.3.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.25.1-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.4.1-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.9-py312hdc12c9d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.9-py312hdc12c9d_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.21.3-h237132a_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lcms2-2.17-h7eeda09_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.0.0-hd64df32_1.conda @@ -376,14 +376,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-20.0.0-hf07054f_8_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-20.0.0-hf07054f_8_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-20.0.0-he749cb8_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-34_h10e41b3_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h5505292_3.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-34_hb3479ef_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-35_h10e41b3_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-35_hb3479ef_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcrc32c-1.1.2-hbdafb3b_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.14.1-h73640d1_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-20.1.8-hf598326_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-21.1.0-hf598326_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.24-h5773f1b_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20250104-pl5321hafb1f1b_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda @@ -392,16 +392,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.6-h1da3d7d_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype-2.13.3-hce30654_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype6-2.13.3-h1d14073_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.1.0-hfdf1602_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.1.0-hb74de2c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.1.0-hfdf1602_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.1.0-hb74de2c_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.36.0-h9484b08_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.36.0-h7081f7f_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.71.0-h857da87_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.1.0-h5505292_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-34_hc9a63f6_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-35_hc9a63f6_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.1-h39f12f2_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.64.0-h6d7220d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.67.0-hc438710_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.30-openmp_h60d53f8_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-1.21.0-h0181452_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-headers-1.21.0-hce30654_0.conda @@ -420,23 +420,23 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.13.8-h4a9ca0c_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/lineax-0.0.7-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-20.1.8-hbb9b287_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312h728bc31_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-21.1.0-hbb9b287_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312hc9b382d_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.4-py312hf263c89_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.4-py312hb64cbc0_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-c-1.10.0-h286801f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py312h998013c_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.5-py312h05635fa_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.6-py312h605b88b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ml_dtypes-0.5.1-py312hcb1e3ce_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ml_dtypes-0.5.1-py312h98f7732_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep_data-0.0.4-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.1-py312hb23fbb9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.1-py312ha0dd364_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/multidict-6.6.3-py312hdb8e49c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.1.2-pyhe01879c_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/narwhals-2.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nlohmann_json-3.12.0-ha1acc90_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.61.2-py312h22bc582_1.conda @@ -450,13 +450,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.1.2-hd90e43c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandas-2.3.2-py312h98f7732_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.3.0-py312h50aef2c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.3.0-py312hce42e9c_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/prometheus-cpp-1.3.0-h0967b3e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.51-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/propcache-0.3.1-py312h998013c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-7.0.0-py312h163523d_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pthread-stubs-0.4-hd74edd7_1002.conda @@ -485,9 +485,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda 
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rich-14.1.0-pyhe01879c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.27.0-py312h6f58b40_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.27.1-py312h6f58b40_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.7.1-py312h54d6233_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.16.1-py312h286a95b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.16.1-py312h6e75237_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scitokens-cpp-1.1.3-h9f99f3a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda @@ -500,17 +500,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h892fb3f_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.2-py312h163523d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.2-py312h163523d_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.9-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.14.1-h4440ef1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.10-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.14.1-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025b-h78e105d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uhi-1.0.0-pyhcf101f3_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.3-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312h163523d_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.4-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/vector-1.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wadler-lindig-0.1.7-pyhe01879c_0.conda @@ -525,7 +525,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.1-h8359307_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.23.0-py312hea69d52_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.24.0-py312h26de6b3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-h6491c7d_2.conda - pypi: git+https://github.com/pfackeldey/relaxed.git?branch=fixes_for_zprime#b5f5ca674cf49e6d48f60a53a75bc35db636894d lab: @@ -537,6 +537,7 @@ environments: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/_x86_64-microarch-level-1-2_x86_64.conda - conda: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.3.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.6.1-pyhd8ed1ab_0.conda @@ -572,40 +573,41 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.4-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.5-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.7.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/boost-histogram-1.6.1-py312h0a2e395_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb03c661_4.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h1289d80_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.5-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2025.8.3-hbd8a1cb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cabinetry-0.6.0-pyhff2d567_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.2.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.8.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h35888ee_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.90-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.91-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2024.11.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2025.7.3-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.3-pyhe01879c_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/correctionlib-2.7.0-py312ha04a795_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cramjam-2.11.0-py312h848b54d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.11-py312hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cramjam-2.11.0-py312h848b54d_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-awkward-2025.5.0-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-histogram-2025.2.0-pyhe01879c_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.16-py312h8285ef7_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.16-py312h8285ef7_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2025.3.0-pyhd8ed1ab_0.conda @@ -613,17 +615,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/etils-1.12.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/evermore-0.3.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.0-pyhd8ed1ab_0.conda - - 
conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.59.1-py312h8a5da7c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.59.2-py312h8a5da7c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.13.3-ha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-xrootd-0.5.1-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hist-2.9.0-pyhb7efba9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hist-base-2.9.0-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/histoprint-2.6.0-pyhd8ed1ab_0.conda @@ -632,12 +634,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/iminuit-2.31.1-py312h8285ef7_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/iminuit-2.31.1-py312h8285ef7_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.7.0-h40b2b14_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.30.1-pyh82676e8_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.4.0-pyhfa0c392_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.5.0-pyhfa0c392_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jax-0.7.0-pyhd8ed1ab_0.conda @@ -646,24 +648,24 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jaxtyping-0.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.25.1-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.4.1-pyh29332c3_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.25.1-he01879c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.6-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.8.1-pyh31011fe_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.16.0-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.17.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.4.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.4.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/lark-1.2.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda @@ -674,11 +676,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-20.0.0-hcb10f89_8_cpu.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-20.0.0-hcb10f89_8_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-20.0.0-h1bed206_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-34_h59b9bed_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-34_he106b2a_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-35_h59b9bed_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-35_he106b2a_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.14.1-h332b0f4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.24-h86f0d12_0.conda @@ -689,19 +691,19 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.6-h2dba641_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.13.3-ha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.13.3-h48d6fc4_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.1.0-h767d61c_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.1.0-h69a702a_4.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.1.0-h69a702a_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.1.0-hcea5267_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.1.0-h767d61c_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.1.0-h767d61c_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.1.0-h69a702a_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.1.0-h69a702a_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.1.0-hcea5267_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.1.0-h767d61c_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.36.0-hc4361e1_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.36.0-h0121fbd_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.71.0-h8e591d7_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.0-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-34_h7ac8fdf_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-35_h7ac8fdf_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-1.21.0-hd1b1c89_0.conda @@ -713,8 +715,8 @@ 
environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.50.4-h0c1763c_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.1.0-h8f9b012_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.1.0-h4852527_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.1.0-h8f9b012_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.1.0-h4852527_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-h8261f1e_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.10.0-h202a827_0.conda @@ -725,23 +727,23 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.8-h2cb61b6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/lineax-0.0.7-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312h374181b_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312he100287_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.4-py312hf0f0c11_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.4-py312h5d89b6d_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.5-py312he3d6523_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.6-py312he3d6523_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.3-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ml_dtypes-0.5.1-py312hf9745cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ml_dtypes-0.5.1-py312hf79963d_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep_data-0.0.4-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.1-py312h68727a3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.1-py312hd9148b4_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.6.3-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.1.2-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda @@ -763,15 +765,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda - 
conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.2-py312hf79963d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py312h80c1187_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.8-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py312h0e488c8_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.4.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/prometheus-cpp-1.3.0-ha5d0236_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.22.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.51-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-7.0.0-py312h4c3975b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda @@ -789,13 +791,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.11-h9e4cc4f_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.2-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-gil-3.12.11-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h0d868a3_3.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.12-8_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h178313f_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-27.0.2-py312h6748674_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-27.0.2-py312hfb55c3c_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2025.06.26-h9925aae_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8c095d6_2.conda @@ -805,10 +808,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3987-syntax-1.1.0-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rich-14.1.0-pyhe01879c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.27.0-py312h868fb18_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.27.1-py312h868fb18_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.22-h96f233e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.7.1-py312h4f0b9e3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.1-py312h4ebe9ca_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.1-py312h7a1785b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scitokens-cpp-1.1.3-h6ac2c77_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda @@ -816,7 +819,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.1.0-pyhd8ed1ab_0.conda @@ -827,19 +830,19 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhe01879c_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.2-py312h4c3975b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.2-py312h4c3975b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.9-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.10-pyhcf101f3_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20250822-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.14.1-h4440ef1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.14.1-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025b-h78e105d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uhi-1.0.0-pyhcf101f3_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h66e93f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.3-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h4c3975b_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.4-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/vector-1.5.1-pyhd8ed1ab_0.conda @@ -855,14 +858,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2025.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/yarl-1.20.1-py312h178313f_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3989a48_8.conda - 
conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312h66e93f0_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.24.0-py312h3fa7853_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb8e6e7a_2.conda - pypi: git+https://github.com/pfackeldey/relaxed.git?branch=fixes_for_zprime#b5f5ca674cf49e6d48f60a53a75bc35db636894d osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.3.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aiohttp-3.12.15-py312h6daa0e5_0.conda @@ -898,40 +902,41 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.8.0-h9ca1f76_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.12.0-hcdd55da_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.4-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.5-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.7.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.8.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/boost-histogram-1.6.1-py312hdc12c9d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312hd8f9ff3_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312h6b01ec3_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.5-h5505292_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2025.8.3-hbd8a1cb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cabinetry-0.6.0-pyhff2d567_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.2.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.8.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h0fad829_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h429097b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.90-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.91-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2024.11.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2025.7.3-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.3-pyhe01879c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.3-py312ha0dd364_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.3-py312ha0dd364_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/correctionlib-2.7.0-py312h674a3be_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cramjam-2.11.0-py312h82c2aec_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.11-py312hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cramjam-2.11.0-py312h82c2aec_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cytoolz-1.0.1-py312hea69d52_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-awkward-2025.5.0-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/dask-histogram-2025.2.0-pyhe01879c_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.16-py312he360a15_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.16-py312he360a15_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2025.3.0-pyhd8ed1ab_0.conda @@ -939,17 +944,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/etils-1.12.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/evermore-0.3.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.59.1-py312h6daa0e5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.59.2-py312h6daa0e5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.13.3-hce30654_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/frozenlist-1.7.0-py312h512c567_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-xrootd-0.5.1-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gflags-2.2.2-hf9b8971_1005.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/glog-0.7.1-heb240a5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hist-2.9.0-pyhb7efba9_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/hist-base-2.9.0-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/histoprint-2.6.0-pyhd8ed1ab_0.conda @@ -959,12 +964,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-75.1-hfee45f7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/iminuit-2.31.1-py312he360a15_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/iminuit-2.31.1-py312he360a15_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.7.0-h40b2b14_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.30.1-pyh92f572d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.4.0-pyhfa0c392_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.5.0-pyhfa0c392_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jax-0.7.0-pyhd8ed1ab_0.conda @@ -973,23 +978,23 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jaxtyping-0.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/json5-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.25.1-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.4.1-pyh29332c3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.25.1-he01879c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.6-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.8.1-pyh31011fe_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.16.0-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.17.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.4.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.4.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.9-py312hdc12c9d_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.9-py312hdc12c9d_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.21.3-h237132a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/lark-1.2.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lcms2-2.17-h7eeda09_0.conda @@ -999,14 +1004,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-20.0.0-hf07054f_8_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-20.0.0-hf07054f_8_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-20.0.0-he749cb8_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-34_h10e41b3_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h5505292_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-34_hb3479ef_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-35_h10e41b3_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-35_hb3479ef_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcrc32c-1.1.2-hbdafb3b_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.14.1-h73640d1_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-20.1.8-hf598326_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-21.1.0-hf598326_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.24-h5773f1b_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20250104-pl5321hafb1f1b_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda @@ -1015,16 +1020,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.6-h1da3d7d_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype-2.13.3-hce30654_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype6-2.13.3-h1d14073_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.1.0-hfdf1602_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.1.0-hb74de2c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.1.0-hfdf1602_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.1.0-hb74de2c_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.36.0-h9484b08_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.36.0-h7081f7f_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.71.0-h857da87_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.1.0-h5505292_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-34_hc9a63f6_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-35_hc9a63f6_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.1-h39f12f2_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.64.0-h6d7220d_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.67.0-hc438710_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.30-openmp_h60d53f8_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-1.21.0-h0181452_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-headers-1.21.0-hce30654_0.conda @@ -1044,24 +1049,24 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.13.8-h4a9ca0c_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/lineax-0.0.7-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-20.1.8-hbb9b287_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312h728bc31_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-21.1.0-hbb9b287_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312hc9b382d_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.4-py312hf263c89_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.4-py312hb64cbc0_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-c-1.10.0-h286801f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py312h998013c_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.5-py312h05635fa_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.6-py312h605b88b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.3-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ml_dtypes-0.5.1-py312hcb1e3ce_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ml_dtypes-0.5.1-py312h98f7732_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep_data-0.0.4-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.1-py312hb23fbb9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.1-py312ha0dd364_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/multidict-6.6.3-py312hdb8e49c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.1.2-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.3.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda @@ -1083,15 +1088,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandas-2.3.2-py312h98f7732_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/parso-0.8.5-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.3.0-py312h50aef2c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.8-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.3.0-py312hce42e9c_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.4.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/prometheus-cpp-1.3.0-h0967b3e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.22.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.51-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/propcache-0.3.1-py312h998013c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-7.0.0-py312h163523d_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pthread-stubs-0.4-hd74edd7_1002.conda @@ -1104,20 +1109,21 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.33.2-py312hd3c0895_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyhf-0.7.6-pyh29332c3_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-11.1-py312h4c66426_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-11.1-py312hb9d441b_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-11.1-py312h4c66426_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-11.1-py312h3964663_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.3-pyhe01879c_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.11-hc22306f_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.2-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-gil-3.12.11-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-xxhash-3.5.0-py312h7a9b006_3.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.12-8_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.2-py312h998013c_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyzmq-27.0.2-py312h211b278_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyzmq-27.0.2-py312hd65ceae_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/qhull-2020.2-h420ef59_5.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2025.06.26-h6589ca4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h1d1bf99_2.conda @@ -1127,9 +1133,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/rfc3987-syntax-1.1.0-pyhe01879c_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rich-14.1.0-pyhe01879c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.27.0-py312h6f58b40_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.27.1-py312h6f58b40_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.7.1-py312h54d6233_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.16.1-py312h286a95b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.16.1-py312h6e75237_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scitokens-cpp-1.1.3-h9f99f3a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda @@ -1137,7 +1143,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/snappy-1.2.2-hd121638_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.1.0-pyhd8ed1ab_0.conda @@ -1148,19 +1154,19 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhe01879c_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.2-py312h163523d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.2-py312h163523d_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.9-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.10-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20250822-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.14.1-h4440ef1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.14.1-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025b-h78e105d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uhi-1.0.0-pyhcf101f3_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.3-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312h163523d_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.4-pyhe01879c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/vector-1.5.1-pyhd8ed1ab_0.conda @@ -1176,11 +1182,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2025.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h925e9cb_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yarl-1.20.1-py312h998013c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-hc1bb282_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-hc0cf3cd_8.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.1-h8359307_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.23.0-py312hea69d52_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.24.0-py312h26de6b3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-h6491c7d_2.conda - pypi: git+https://github.com/pfackeldey/relaxed.git?branch=fixes_for_zprime#b5f5ca674cf49e6d48f60a53a75bc35db636894d packages: @@ -1205,6 +1211,17 @@ packages: purls: [] size: 23621 timestamp: 1650670423406 +- conda: https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda + sha256: a3967b937b9abf0f2a99f3173fa4630293979bd1644709d89580e7c62a544661 + md5: aaa2a381ccc56eac91d63b6c1240312f + depends: + - cpython + - python-gil + license: MIT + license_family: MIT + purls: [] + size: 8191 + timestamp: 1744137672556 - conda: https://conda.anaconda.org/conda-forge/noarch/_x86_64-microarch-level-1-2_x86_64.conda build_number: 2 sha256: 7623b2b804165b458f520371c40f5a607847336a882a55d3cfbdfb6407082794 @@ -1277,7 +1294,7 @@ packages: license: MIT AND Apache-2.0 license_family: Apache purls: - - pkg:pypi/aiohttp?source=compressed-mapping + - 
pkg:pypi/aiohttp?source=hash-mapping size: 978588 timestamp: 1753805356065 - conda: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.4.0-pyhd8ed1ab_0.conda @@ -2009,19 +2026,19 @@ packages: - pkg:pypi/babel?source=hash-mapping size: 6938256 timestamp: 1738490268466 -- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.4-pyha770c72_0.conda - sha256: ddb0df12fd30b2d36272f5daf6b6251c7625d6a99414d7ea930005bbaecad06d - md5: 9f07c4fc992adb2d6c30da7fab3959a7 +- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.5-pyha770c72_0.conda + sha256: d2124c0ea13527c7f54582269b3ae19541141a3740d6d779e7aa95aa82eaf561 + md5: de0fd9702fd4c1186e930b8c35af6b6b depends: - - python >=3.9 + - python >=3.10 - soupsieve >=1.2 - typing-extensions license: MIT license_family: MIT purls: - - pkg:pypi/beautifulsoup4?source=hash-mapping - size: 146613 - timestamp: 1744783307123 + - pkg:pypi/beautifulsoup4?source=compressed-mapping + size: 88278 + timestamp: 1756094375546 - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda sha256: a05971bb80cca50ce9977aad3f7fc053e54ea7d5321523efc7b9a6e12901d3cd md5: f0b4c8e370446ef89797608d60a564b3 @@ -2046,9 +2063,9 @@ packages: purls: [] size: 4213 timestamp: 1737382993425 -- conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.7.3-pyhd8ed1ab_0.conda - sha256: dd116a77a5aca118cfdfcc97553642295a3fb176a4e741fd3d1363ee81cebdfd - md5: 708d2f99b8a2c833ff164a225a265e76 +- conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.8.0-pyhd8ed1ab_0.conda + sha256: 3a0af5b0c30d1e50cda6fea8c7783f3ea925e83f427b059fa81b2f36cde72e28 + md5: 30698cfea774ec175babb8ff08dbc07a depends: - contourpy >=1.2 - jinja2 >=2.9 @@ -2065,8 +2082,8 @@ packages: license_family: BSD purls: - pkg:pypi/bokeh?source=hash-mapping - size: 4934851 - timestamp: 1747091638593 + size: 5020661 + timestamp: 1756543232734 - conda: 
https://conda.anaconda.org/conda-forge/linux-64/boost-histogram-1.6.1-py312h0a2e395_0.conda sha256: ebd510d0c01d9a78a92757cecdddc7e1b1daac394de483712f954d0d32d96a18 md5: dae288a03d46bb20737c7b78a0746ea6 @@ -2099,92 +2116,92 @@ packages: - pkg:pypi/boost-histogram?source=hash-mapping size: 875180 timestamp: 1755136812318 -- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_3.conda - sha256: c969baaa5d7a21afb5ed4b8dd830f82b78e425caaa13d717766ed07a61630bec - md5: 5d08a0ac29e6a5a984817584775d4131 +- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb03c661_4.conda + sha256: 294526a54fa13635341729f250d0b1cf8f82cad1e6b83130304cbf3b6d8b74cc + md5: eaf3fbd2aa97c212336de38a51fe404e depends: - __glibc >=2.17,<3.0.a0 - - brotli-bin 1.1.0 hb9d3cd8_3 - - libbrotlidec 1.1.0 hb9d3cd8_3 - - libbrotlienc 1.1.0 hb9d3cd8_3 - - libgcc >=13 + - brotli-bin 1.1.0 hb03c661_4 + - libbrotlidec 1.1.0 hb03c661_4 + - libbrotlienc 1.1.0 hb03c661_4 + - libgcc >=14 license: MIT license_family: MIT purls: [] - size: 19810 - timestamp: 1749230148642 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h5505292_3.conda - sha256: 97e2a90342869cc122921fdff0e6be2f5c38268555c08ba5d14e1615e4637e35 - md5: 03c7865dd4dbf87b7b7d363e24c632f1 + size: 19883 + timestamp: 1756599394934 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h6caf38d_4.conda + sha256: 8aa8ee52b95fdc3ef09d476cbfa30df722809b16e6dca4a4f80e581012035b7b + md5: ce8659623cea44cc812bc0bfae4041c5 depends: - __osx >=11.0 - - brotli-bin 1.1.0 h5505292_3 - - libbrotlidec 1.1.0 h5505292_3 - - libbrotlienc 1.1.0 h5505292_3 + - brotli-bin 1.1.0 h6caf38d_4 + - libbrotlidec 1.1.0 h6caf38d_4 + - libbrotlienc 1.1.0 h6caf38d_4 license: MIT license_family: MIT purls: [] - size: 20094 - timestamp: 1749230390021 -- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_3.conda - sha256: 
ab74fa8c3d1ca0a055226be89e99d6798c65053e2d2d3c6cb380c574972cd4a7 - md5: 58178ef8ba927229fba6d84abf62c108 + size: 20003 + timestamp: 1756599758165 +- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb03c661_4.conda + sha256: 444903c6e5c553175721a16b7c7de590ef754a15c28c99afbc8a963b35269517 + md5: ca4ed8015764937c81b830f7f5b68543 depends: - __glibc >=2.17,<3.0.a0 - - libbrotlidec 1.1.0 hb9d3cd8_3 - - libbrotlienc 1.1.0 hb9d3cd8_3 - - libgcc >=13 + - libbrotlidec 1.1.0 hb03c661_4 + - libbrotlienc 1.1.0 hb03c661_4 + - libgcc >=14 license: MIT license_family: MIT purls: [] - size: 19390 - timestamp: 1749230137037 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h5505292_3.conda - sha256: 5c6a808326c3bbb6f015a57c9eb463d65f259f67154f4f06783d8829ce9239b4 - md5: cc435eb5160035fd8503e9a58036c5b5 + size: 19615 + timestamp: 1756599385418 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h6caf38d_4.conda + sha256: e57d402b02c9287b7c02d9947d7b7b55a4f7d73341c210c233f6b388d4641e08 + md5: ab57f389f304c4d2eb86d8ae46d219c3 depends: - __osx >=11.0 - - libbrotlidec 1.1.0 h5505292_3 - - libbrotlienc 1.1.0 h5505292_3 + - libbrotlidec 1.1.0 h6caf38d_4 + - libbrotlienc 1.1.0 h6caf38d_4 license: MIT license_family: MIT purls: [] - size: 17185 - timestamp: 1749230373519 -- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_3.conda - sha256: dc27c58dc717b456eee2d57d8bc71df3f562ee49368a2351103bc8f1b67da251 - md5: a32e0c069f6c3dcac635f7b0b0dac67e + size: 17373 + timestamp: 1756599741779 +- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h1289d80_4.conda + sha256: 52a9ac412512b418ecdb364ba21c0f3dc96f0abbdb356b3cfbb980020b663d9b + md5: fd0e7746ed0676f008daacb706ce69e4 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 + - libgcc >=14 + - libstdcxx >=14 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 constrains: - - 
libbrotlicommon 1.1.0 hb9d3cd8_3 + - libbrotlicommon 1.1.0 hb03c661_4 license: MIT license_family: MIT purls: - - pkg:pypi/brotli?source=hash-mapping - size: 351721 - timestamp: 1749230265727 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312hd8f9ff3_3.conda - sha256: 35df7079768b4c51764149c42b14ccc25c4415e4365ecc06c38f74562d9e4d16 - md5: c7c728df70dc05a443f1e337c28de22d + - pkg:pypi/brotli?source=compressed-mapping + size: 354149 + timestamp: 1756599553574 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312h6b01ec3_4.conda + sha256: e45f24660a89c734c3d54f185ecdc359e52a5604d7e0b371e35dce042fa3cf3a + md5: 0d50ab05d6d8fa7a38213c809637ba6d depends: - __osx >=11.0 - - libcxx >=18 + - libcxx >=19 - python >=3.12,<3.13.0a0 - python >=3.12,<3.13.0a0 *_cpython - python_abi 3.12.* *_cp312 constrains: - - libbrotlicommon 1.1.0 h5505292_3 + - libbrotlicommon 1.1.0 h6caf38d_4 license: MIT license_family: MIT purls: - pkg:pypi/brotli?source=hash-mapping - size: 339365 - timestamp: 1749230606596 + size: 341750 + timestamp: 1756600036931 - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda sha256: 5ced96500d945fb286c9c838e54fa759aa04a7129c59800f0846b4335cee770d md5: 62ee74e96c5ebb0af99386de58cf9553 @@ -2274,17 +2291,17 @@ packages: - pkg:pypi/cached-property?source=hash-mapping size: 11065 timestamp: 1615209567874 -- conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.1.0-pyhd8ed1ab_0.conda - sha256: b8da50f4b85f267f2369f9f1ac60f9a8dae547140f343023fdf61065fdf7ca0a - md5: f84eb05fa7f862602bfaf4dd844bd61b +- conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-6.2.0-pyhd8ed1ab_0.conda + sha256: 5cdf6c2624ad70baab0374d3a582e302b98d3cbfa7935e0aeab6a1857de0a7a0 + md5: 33a59a2cf83ab89ee546c72254521a4a depends: - - python >=3.9 + - python >=3.10 license: MIT license_family: MIT purls: - pkg:pypi/cachetools?source=hash-mapping - size: 16431 - timestamp: 
1750147985559 + size: 16514 + timestamp: 1756198358026 - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.8.3-pyhd8ed1ab_0.conda sha256: a1ad5b0a2a242f439608f22a538d2175cac4444b7b3f4e2b8c090ac337aaea40 md5: 11f59985f49df4620890f3e746ed7102 @@ -2295,13 +2312,13 @@ packages: - pkg:pypi/certifi?source=compressed-mapping size: 158692 timestamp: 1754231530168 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - sha256: cba6ea83c4b0b4f5b5dc59cb19830519b28f95d7ebef7c9c5cf1c14843621457 - md5: a861504bbea4161a9170b85d4d2be840 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h35888ee_1.conda + sha256: 13bf94678e7a853a39a2c6dc2674b096cfe80f43ad03d7fff4bcde05edf9fda4 + md5: 918e2510c64000a916355dcf09d26da2 depends: - __glibc >=2.17,<3.0.a0 - - libffi >=3.4,<4.0a0 - - libgcc >=13 + - libffi >=3.4.6,<3.5.0a0 + - libgcc >=14 - pycparser - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -2309,14 +2326,14 @@ packages: license_family: MIT purls: - pkg:pypi/cffi?source=hash-mapping - size: 294403 - timestamp: 1725560714366 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h0fad829_0.conda - sha256: 8d91a0d01358b5c3f20297c6c536c5d24ccd3e0c2ddd37f9d0593d0f0070226f - md5: 19a5456f72f505881ba493979777b24e + size: 295227 + timestamp: 1756808421998 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h429097b_1.conda + sha256: d6f96b95916d994166d2649374420b11132b33043c68d8681ab9afe29df3fbc3 + md5: 9641dfbf70709463180c574a3a7a0b13 depends: - __osx >=11.0 - - libffi >=3.4,<4.0a0 + - libffi >=3.4.6,<3.5.0a0 - pycparser - python >=3.12,<3.13.0a0 - python >=3.12,<3.13.0a0 *_cpython @@ -2325,8 +2342,8 @@ packages: license_family: MIT purls: - pkg:pypi/cffi?source=hash-mapping - size: 281206 - timestamp: 1725560813378 + size: 287170 + timestamp: 1756808571913 - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.3-pyhd8ed1ab_0.conda 
sha256: 838d5a011f0e7422be6427becba3de743c78f3874ad2743c341accbba9bb2624 md5: 7e7d5ef1b9ed630e4a1c358d6bc62284 @@ -2338,24 +2355,24 @@ packages: - pkg:pypi/charset-normalizer?source=hash-mapping size: 51033 timestamp: 1754767444665 -- conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.90-pyhd8ed1ab_0.conda - sha256: afaa1913ba6b35a74e0f1d1ecf1ff80a6d727f86675901db0dc1a552d59ab385 - md5: 16d1408b8727d5cabb745b37b6a05207 +- conda: https://conda.anaconda.org/conda-forge/noarch/chex-0.1.91-pyhd8ed1ab_0.conda + sha256: 139d7b38a543ba6c7b8aebaa64e9bfc05dd4f8f56977c3ee740892f76fd49546 + md5: 207c50a7c4059d9235329e59085f06a0 depends: - - absl-py >=0.9.0 - - jax >=0.4.27 - - jaxlib >=0.4.27 + - absl-py >=2.3.1 + - jax >=0.7.0 + - jaxlib >=0.7.0 - numpy >=1.24.1 - - python >=3.9 - - toolz >=0.9.0 + - python >=3.11 + - toolz >=1.0.0 - typing-extensions >=4.2.0 - - typing_extensions >=4.2.0 + - typing_extensions >=4.15.0 license: Apache-2.0 license_family: APACHE purls: - pkg:pypi/chex?source=hash-mapping - size: 81101 - timestamp: 1753385859048 + size: 80820 + timestamp: 1756790803659 - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda sha256: 8aee789c82d8fdd997840c952a586db63c6890b00e88c4fb6e80a38edd5f51c0 md5: 94b550b8d3a614dbd326af798c7dfb40 @@ -2379,41 +2396,41 @@ packages: - pkg:pypi/cloudpickle?source=hash-mapping size: 25870 timestamp: 1736947650712 -- conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2024.11.0-pyhff2d567_0.conda - sha256: 2a3da1def761d431f0ec2513283067e2c0123d5e13c3964a10209a14bbc8b81e - md5: 41d9db9fe41968836548bbb87b52ddf4 +- conda: https://conda.anaconda.org/conda-forge/noarch/coffea-2025.7.3-pyhe01879c_0.conda + sha256: a50dc13ddc5be38b166ff36ec4fc29b0b417e95d065b840aff820bce02927e00 + md5: 7e856501b490d4e81f3a2071456d0acc depends: - - aiohttp - - awkward >=2.6.7 - - cachetools - - cloudpickle >=1.2.3 - - correctionlib >=2.6.0 - - dask-awkward >=2024.9.0 + - python >=3.9 + - awkward >=2.8.2 + 
- uproot >=5.6.0 - dask-core >=2024.3.0 - - dask-histogram >=2024.9.1 - - fsspec-xrootd >=0.2.3 - - hist >=2 - - lz4 + - dask-awkward >=2025.5.0 + - dask-histogram >=2025.2.0 + - vector >=1.4.1,!=1.6.0 + - correctionlib >=2.6.0 + - pyarrow >=6.0.0,<21.0.0 - matplotlib-base >=3 - - mplhep >=0.1.18 - numba >=0.58.1 - - numpy >=1.22.0 + - numpy >=1.22 + - scipy >=1.1.0 + - tqdm >=4.27.0 + - lz4 + - cloudpickle >=1.2.3 + - toml >=0.10.2 + - mplhep >=0.1.18 - packaging - pandas - - pyarrow >=6.0.0 - - python >=3.9 + - hist >=2 + - cachetools - requests - - scipy >=1.1.0 - - toml >=0.10.2 - - tqdm >=4.27.0 - - uproot >=5.3.11 - - vector >=1.4.1 + - aiohttp + - python license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/coffea?source=hash-mapping - size: 142249 - timestamp: 1732180728179 + size: 191630 + timestamp: 1753651485872 - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda sha256: ab29d57dc70786c1269633ba3dff20288b81664d3ff8d21af995742e2bb03287 md5: 962b9857ee8e7018c22f2776ffa0b2d7 @@ -2434,12 +2451,12 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/comm?source=compressed-mapping + - pkg:pypi/comm?source=hash-mapping size: 14690 timestamp: 1753453984907 -- conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_1.conda - sha256: d9cb7f97a184a383bf0c72e1fa83b983a1caa68d7564f4449a4de7c97df9cb3f - md5: e25ed6c2e3b1effedfe9cd10a15ca8d8 +- conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_2.conda + sha256: cedae3c71ad59b6796d182f9198e881738b7a2c7b70f18427d7788f3173befb2 + md5: bce621e43978c245261c76b45edeaa3d depends: - __glibc >=2.17,<3.0.a0 - libgcc >=14 @@ -2450,12 +2467,12 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/contourpy?source=compressed-mapping - size: 291827 - timestamp: 1754063770363 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.3-py312ha0dd364_1.conda - sha256: 
a51a6f7f7e236cadc45790880dc0b7c91cf6a950277ffe839b689f072783a8d0 - md5: e0b0bffaccf76ef33679dd2e5309442e + - pkg:pypi/contourpy?source=hash-mapping + size: 295534 + timestamp: 1756544766129 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.3-py312ha0dd364_2.conda + sha256: 95c3f2a595be008ec861ea6bddbf6e2abdfbc115b0e01112b3ae64c7ae641b9e + md5: bb1a2ab9b69fe1bb11d6ad9f1b39c0c4 depends: - __osx >=11.0 - libcxx >=19 @@ -2467,8 +2484,8 @@ packages: license_family: BSD purls: - pkg:pypi/contourpy?source=hash-mapping - size: 257410 - timestamp: 1754063952152 + size: 259025 + timestamp: 1756544906767 - conda: https://conda.anaconda.org/conda-forge/linux-64/correctionlib-2.7.0-py312ha04a795_0.conda sha256: a1ba5aecba02e6c097a835ba6735f81e6fc16596674271307c0f5e97d97f0fa9 md5: 21b524c9949587d0fa07ea4dde450978 @@ -2509,15 +2526,25 @@ packages: - pkg:pypi/correctionlib?source=hash-mapping size: 349264 timestamp: 1746557633533 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cramjam-2.11.0-py312h848b54d_0.conda - sha256: 877f046f4ffc35998a921e70242c8b6e3c3b89fd110b3772d478d70acc3f69a7 - md5: b91caa551c3daa2c48b8aa48fcdfffa6 +- conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.11-py312hd8ed1ab_0.conda + noarch: generic + sha256: 7e7bc8e73a2f3736444a8564cbece7216464c00f0bc38e604b0c792ff60d621a + md5: e5279009e7a7f7edd3cd2880c502b3cc + depends: + - python >=3.12,<3.13.0a0 + - python_abi * *_cp312 + license: Python-2.0 + purls: [] + size: 45852 + timestamp: 1749047748072 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cramjam-2.11.0-py312h848b54d_1.conda + sha256: f7d8515fb12daf0acace81e868777e27a41e3f675545e911f72058f6c448f102 + md5: 413e0bc364c0cd7d0432cc0bc42eab10 depends: - python - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - libstdcxx >=14 - libgcc >=14 + - __glibc >=2.17,<3.0.a0 - python_abi 3.12.* *_cp312 constrains: - __glibc >=2.17 @@ -2525,11 +2552,11 @@ packages: license_family: MIT purls: - 
pkg:pypi/cramjam?source=hash-mapping - size: 1815497 - timestamp: 1753699211797 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cramjam-2.11.0-py312h82c2aec_0.conda - sha256: 0f4103ea3177d6be44e2448aaca23990d710652ec64629dfac87c5ceadbdc6ba - md5: ef5648cddcd10943a5b3d53a04ff5adc + size: 1811797 + timestamp: 1756864826716 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cramjam-2.11.0-py312h82c2aec_1.conda + sha256: 48a59b50ebd5e160dd4af20fda0a7a977bdd542c7c3c54fbf475f4c25031d133 + md5: 377bd6c81b1a2efeaea71211420457bb depends: - python - __osx >=11.0 @@ -2542,8 +2569,8 @@ packages: license_family: MIT purls: - pkg:pypi/cramjam?source=hash-mapping - size: 1638171 - timestamp: 1753699232961 + size: 1636933 + timestamp: 1756864829531 - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda sha256: 9827efa891e507a91a8a2acf64e210d2aff394e1cde432ad08e1f8c66b12293c md5: 44600c4667a319d67dbe0681fc0bc833 @@ -2656,37 +2683,36 @@ packages: - pkg:pypi/dask-histogram?source=hash-mapping size: 29209 timestamp: 1751928117365 -- conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.16-py312h8285ef7_0.conda - sha256: ad6193b4c2771a82a8df3408d9c6174016b487fd1f7501b1618fa034c5118534 - md5: 6205bf8723b4b79275dd52ef60cf6af1 +- conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.16-py312h8285ef7_1.conda + sha256: 1212cba3b9eb610b53a59c88460049f0cce4e3b8b66c6376e10df3cdd74d80f1 + md5: 45b13b9f0c8995cef3cc4e62f8b4a3f3 depends: - python - - libgcc >=14 - - __glibc >=2.17,<3.0.a0 - libstdcxx >=14 - libgcc >=14 + - __glibc >=2.17,<3.0.a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - - pkg:pypi/debugpy?source=compressed-mapping - size: 2856116 - timestamp: 1754523420446 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.16-py312he360a15_0.conda - sha256: 144542a7c6f3970a8c7012f2b0bea625e0024e809091861f688a7c0786c3e4ee - md5: 5324a4353a78309f0cb874d1fa98e4da + - 
pkg:pypi/debugpy?source=hash-mapping + size: 2856148 + timestamp: 1756742065364 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.16-py312he360a15_1.conda + sha256: cba7445a8f66174db5474635f57be16366f868609ad4cb8d9fd1bcda0d4c574e + md5: a32874675d3568f218e4ebd9a94cbd43 depends: - python + - libcxx >=19 - __osx >=11.0 - python 3.12.* *_cpython - - libcxx >=19 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - - pkg:pypi/debugpy?source=compressed-mapping - size: 2752346 - timestamp: 1754523441845 + - pkg:pypi/debugpy?source=hash-mapping + size: 2752432 + timestamp: 1756742046739 - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda sha256: c17c6b9937c08ad63cb20a26f403a3234088e57d4455600974a0ce865cb14017 md5: 9ce473d1d1be1cc3810856a48b3fab32 @@ -2776,6 +2802,7 @@ packages: - treescope - python license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/evermore?source=hash-mapping size: 36671 @@ -2791,20 +2818,20 @@ packages: - pkg:pypi/exceptiongroup?source=hash-mapping size: 21284 timestamp: 1746947398083 -- conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.0-pyhd8ed1ab_0.conda - sha256: 7510dd93b9848c6257c43fdf9ad22adf62e7aa6da5f12a6a757aed83bcfedf05 - md5: 81d30c08f9a3e556e8ca9e124b044d14 +- conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + sha256: 210c8165a58fdbf16e626aac93cc4c14dbd551a01d1516be5ecad795d2422cad + md5: ff9efb7f7469aed3c4a8106ffa29593c depends: - - python >=3.9 + - python >=3.10 license: MIT license_family: MIT purls: - - pkg:pypi/executing?source=hash-mapping - size: 29652 - timestamp: 1745502200340 -- conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.59.1-py312h8a5da7c_0.conda - sha256: 8c65a6c9592828ca767161b47e66e66fe8d32b8e1f8af37b10b6594ad1c77340 - md5: 313520338e97b747315b5be6a563c315 + - pkg:pypi/executing?source=compressed-mapping + size: 30753 + timestamp: 1756729456476 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.59.2-py312h8a5da7c_0.conda + sha256: da1c642961e2cad6748266c55ee625062fbdec9f191dc16a29859b2b996a4eea + md5: 4c3f3c752ec0cd37b0a0990af20fd952 depends: - __glibc >=2.17,<3.0.a0 - brotli @@ -2817,11 +2844,11 @@ packages: license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2863893 - timestamp: 1755224234236 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.59.1-py312h6daa0e5_0.conda - sha256: 2751b170e19e03252b4e3a537f42e62396d7a87afa5b8ebce97eea565abbb95a - md5: 55d9d37b29f97b6cd08d6c3dcc8a0712 + size: 2891057 + timestamp: 1756328984659 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.59.2-py312h6daa0e5_0.conda + sha256: d566e5096981a70c87523ff7aff057c9c13bab851df861861003482efb7825e4 + md5: 715f0ac9bac5abe18951f66738cc3e3a depends: - __osx >=11.0 - brotli @@ -2834,8 +2861,8 @@ packages: license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2831709 - timestamp: 1755224364277 + size: 2796136 + timestamp: 1756329018672 - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda sha256: 2509992ec2fd38ab27c7cdb42cf6cadc566a1cc0d1021a2673475d9fa87c6276 md5: d3549fd50d450b6d9e7dddff25dd2110 @@ -2898,17 +2925,17 @@ packages: - pkg:pypi/frozenlist?source=hash-mapping size: 52265 timestamp: 1752167495152 -- conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.7.0-pyhd8ed1ab_0.conda - sha256: f734d98cd046392fbd9872df89ac043d72ac15f6a2529f129d912e28ab44609c - md5: a31ce802cd0ebfce298f342c02757019 +- conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.9.0-pyhd8ed1ab_0.conda + sha256: 05e55a2bd5e4d7f661d1f4c291ca8e65179f68234d18eb70fc00f50934d3c4d3 + md5: 76f492bd8ba8a0fb80ffe16fc1a75b3b depends: - - python >=3.9 + - python >=3.10 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/fsspec?source=compressed-mapping - size: 145357 - timestamp: 1752608821935 + - 
pkg:pypi/fsspec?source=hash-mapping + size: 145678 + timestamp: 1756908673345 - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-xrootd-0.5.1-pyhe01879c_1.conda sha256: 6a282fe2e51699d7af943abc411560a2f63ee621b03fa4e28b9bd5a566021810 md5: 49f1201fc27ee744ff168596536b7fd1 @@ -2982,19 +3009,20 @@ packages: - pkg:pypi/h11?source=hash-mapping size: 37697 timestamp: 1745526482242 -- conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda - sha256: 0aa1cdc67a9fe75ea95b5644b734a756200d6ec9d0dff66530aec3d1c1e9df75 - md5: b4754fb1bdcb70c8fd54f918301582c6 +- conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + sha256: 84c64443368f84b600bfecc529a1194a3b14c3656ee2e832d15a20e0329b6da3 + md5: 164fc43f0b53b6e3a7bc7dce5e4f1dc9 depends: - - hpack >=4.1,<5 + - python >=3.10 - hyperframe >=6.1,<7 - - python >=3.9 + - hpack >=4.1,<5 + - python license: MIT license_family: MIT purls: - - pkg:pypi/h2?source=hash-mapping - size: 53888 - timestamp: 1738578623567 + - pkg:pypi/h2?source=compressed-mapping + size: 95967 + timestamp: 1756364871835 - conda: https://conda.anaconda.org/conda-forge/noarch/hist-2.9.0-pyhb7efba9_0.conda sha256: 8a7f38ca7c08c62da2c8baea73d895b7c11486693d195e0d04f324af3690f589 md5: 7ce0c713e6e2007fca4aaafaac374d38 @@ -3116,38 +3144,38 @@ packages: - pkg:pypi/idna?source=hash-mapping size: 49765 timestamp: 1733211921194 -- conda: https://conda.anaconda.org/conda-forge/linux-64/iminuit-2.31.1-py312h8285ef7_1.conda - sha256: ef388e178c31d68e9ceb9024ff9d80ce17405d064fd8f78eb73defd794b01e06 - md5: 79baca8c7406cecf8fffb126d4fde120 +- conda: https://conda.anaconda.org/conda-forge/linux-64/iminuit-2.31.1-py312h8285ef7_2.conda + sha256: 48ea144759c2dc73ba5efdcc553a69936daf6a9d07416199b38b968f1c05d9d6 + md5: 13efa82eaf70729e34c858fb87f14117 depends: - python - numpy - - __glibc >=2.17,<3.0.a0 - libstdcxx >=14 - libgcc >=14 + - __glibc >=2.17,<3.0.a0 - python_abi 3.12.* *_cp312 license: LGPL-2.0-or-later 
license_family: LGPL purls: - pkg:pypi/iminuit?source=hash-mapping - size: 579438 - timestamp: 1753662193587 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/iminuit-2.31.1-py312he360a15_1.conda - sha256: 5ab5e4babff26521a56d61d2323689dccd74654637121f4106f2d0bca73670d8 - md5: eb53f65e80f6d0c058ef39d683de204a + size: 570769 + timestamp: 1756910194162 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/iminuit-2.31.1-py312he360a15_2.conda + sha256: 072444ef80170e5b1faea2ecc687256e514c90f44841dd5834ddfcf25c593bf0 + md5: b40f210e6e542015bcc9c60ab8b1357f depends: - python - numpy + - __osx >=11.0 - libcxx >=19 - python 3.12.* *_cpython - - __osx >=11.0 - python_abi 3.12.* *_cp312 license: LGPL-2.0-or-later license_family: LGPL purls: - pkg:pypi/iminuit?source=hash-mapping - size: 538819 - timestamp: 1753662199774 + size: 525155 + timestamp: 1756910167346 - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda sha256: c18ab120a0613ada4391b15981d86ff777b5690ca461ea7e9e49531e8f374745 md5: 63ccfdc3a3ce25b027b8767eb722fca8 @@ -3234,9 +3262,9 @@ packages: - pkg:pypi/ipykernel?source=hash-mapping size: 121397 timestamp: 1754353050327 -- conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.4.0-pyhfa0c392_0.conda - sha256: ff5138bf6071ca01d84e1329f6baa96f0723df6fe183cfa1ab3ebc96240e6d8f - md5: cb7706b10f35e7507917cefa0978a66d +- conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.5.0-pyhfa0c392_0.conda + sha256: e9ca009d3aab9d8a85f0241d6ada2c7fbc84072008e95f803fa59da3294aa863 + md5: c0916cc4b733577cd41df93884d857b0 depends: - __unix - pexpect >4.3 @@ -3257,8 +3285,8 @@ packages: license_family: BSD purls: - pkg:pypi/ipython?source=hash-mapping - size: 628259 - timestamp: 1751465044469 + size: 630826 + timestamp: 1756474504536 - conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda sha256: 894682a42a7d659ae12878dbcb274516a7031bbea9104e92f8e88c1f2765a104 
md5: bd80ba060603cc228d9d81c257093119 @@ -3404,18 +3432,18 @@ packages: - pkg:pypi/jinja2?source=hash-mapping size: 112714 timestamp: 1741263433881 -- conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.1-pyhd8ed1ab_0.conda - sha256: e5a4eca9a5d8adfaa3d51e24eefd1a6d560cb3b33a7e1eee13e410bec457b7ed - md5: fb1c14694de51a476ce8636d92b6f42c +- conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.5.2-pyhd8ed1ab_0.conda + sha256: 6fc414c5ae7289739c2ba75ff569b79f72e38991d61eb67426a8a4b92f90462c + md5: 4e717929cfa0d49cef92d911e31d0e90 depends: - - python >=3.9 + - python >=3.10 - setuptools license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/joblib?source=hash-mapping - size: 224437 - timestamp: 1748019237972 + size: 224671 + timestamp: 1756321850584 - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.12.1-pyhd8ed1ab_0.conda sha256: 4e08ccf9fa1103b617a4167a270768de736a36be795c6cd34c2761100d332f74 md5: 0fc93f473c31a2f85c0bde213e7c63ca @@ -3439,9 +3467,9 @@ packages: - pkg:pypi/jsonpatch?source=hash-mapping size: 17311 timestamp: 1733814664790 -- conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda - sha256: 76ccb7bffc7761d1d3133ffbe1f7f1710a0f0d9aaa9f7ea522652e799f3601f4 - md5: 6b51f7459ea4073eeb5057207e2e1e3d +- conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_2.conda + sha256: 39c77cd86d9f544e3ce11fdbab1047181d08dd14a72461d06d957b5fcfc78615 + md5: eeaf37c3dc2d1660668bd102c841f783 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -3449,11 +3477,11 @@ packages: license_family: BSD purls: - pkg:pypi/jsonpointer?source=hash-mapping - size: 17277 - timestamp: 1725303032027 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_1.conda - sha256: f6fb3734e967d1cd0cde32844ee952809f6c0a49895da7ec1c8cfdf97739b947 - md5: 80f403c03290e1662be03e026fb5f8ab + size: 17957 + timestamp: 1756754245172 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_2.conda + sha256: 1580c22576df479b8a05370a162aa1bca8ba048f6f5c43ec9269e600c64f43b0 + md5: bfd72094f8390de02e426ac61fb7b8ee depends: - python >=3.12,<3.13.0a0 - python >=3.12,<3.13.0a0 *_cpython @@ -3462,8 +3490,8 @@ packages: license_family: BSD purls: - pkg:pypi/jsonpointer?source=hash-mapping - size: 17865 - timestamp: 1725303130815 + size: 18540 + timestamp: 1756754421272 - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.25.1-pyhe01879c_0.conda sha256: ac377ef7762e49cb9c4f985f1281eeff471e9adc3402526eea78e6ac6589cf1d md5: 341fd940c242cf33e832c0402face56f @@ -3512,20 +3540,20 @@ packages: purls: [] size: 4744 timestamp: 1755595646123 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.6-pyhe01879c_0.conda - sha256: 6f2d6c5983e013af68e7e1d7082cc46b11f55e28147bd0a72a44488972ed90a3 - md5: 7129ed52335cc7164baf4d6508a3f233 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.3.0-pyhcf101f3_0.conda + sha256: 897ad2e2c2335ef3c2826d7805e16002a1fd0d509b4ae0bc66617f0e0ff07bc2 + md5: 62b7c96c6cd77f8173cc5cada6a9acaa depends: - importlib-metadata >=4.8.3 - jupyter_server >=1.1.2 - - python >=3.9 + - python >=3.10 - python license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/jupyter-lsp?source=compressed-mapping - size: 58416 - timestamp: 1752935193718 + - pkg:pypi/jupyter-lsp?source=hash-mapping + size: 60377 + timestamp: 1756388269267 - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda sha256: 19d8bd5bb2fde910ec59e081eeb59529491995ce0d653a5209366611023a0b3a md5: 4ebae00eae9705b0c3d6d1018a81d047 @@ -3577,9 +3605,9 @@ packages: - pkg:pypi/jupyter-events?source=hash-mapping size: 23647 timestamp: 1738765986736 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.16.0-pyhe01879c_0.conda - sha256: 0082fb6f0afaf872affee4cde3b210f7f7497a5fb47f2944ab638fef0f0e2e77 - md5: 
f062e04d7cd585c937acbf194dceec36 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.17.0-pyhcf101f3_0.conda + sha256: 74c4e642be97c538dae1895f7052599dfd740d8bd251f727bce6453ce8d6cd9a + md5: d79a87dcfa726bcea8e61275feed6f83 depends: - anyio >=3.1.0 - argon2-cffi >=21.1 @@ -3593,7 +3621,7 @@ packages: - overrides >=5.0 - packaging >=22.0 - prometheus_client >=0.9 - - python >=3.9 + - python >=3.10 - pyzmq >=24 - send2trash >=1.8.2 - terminado >=0.8.3 @@ -3605,8 +3633,8 @@ packages: license_family: BSD purls: - pkg:pypi/jupyter-server?source=hash-mapping - size: 344376 - timestamp: 1747083217715 + size: 347094 + timestamp: 1755870522134 - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda sha256: 0890fc79422191bc29edf17d7b42cff44ba254aa225d31eb30819f8772b775b8 md5: 2d983ff1b82a1ccb6f2e9d8784bdd6bd @@ -3619,9 +3647,9 @@ packages: - pkg:pypi/jupyter-server-terminals?source=hash-mapping size: 19711 timestamp: 1733428049134 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.4.6-pyhd8ed1ab_0.conda - sha256: c3558f1c2a5977799ce425f1f7c8d8d1cae3408da41ec4f5c3771a21e673d465 - md5: 70cb2903114eafc6ed5d70ca91ba6545 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.4.7-pyhd8ed1ab_0.conda + sha256: 042bdb981ad5394530bee8329a10c76b9e17c12651d15a885d68e2cbbfef6869 + md5: 460d51bb21b7a4c4b6e100c824405fbb depends: - async-lru >=1.0.0 - httpx >=0.25.0,<1 @@ -3634,7 +3662,7 @@ packages: - jupyterlab_server >=2.27.1,<3 - notebook-shim >=0.2 - packaging - - python >=3.9 + - python >=3.10 - setuptools >=41.1.0 - tomli >=1.2.2 - tornado >=6.2.0 @@ -3642,9 +3670,9 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/jupyterlab?source=compressed-mapping - size: 8408461 - timestamp: 1755263247917 + - pkg:pypi/jupyterlab?source=hash-mapping + size: 8479512 + timestamp: 1756911706349 - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda sha256: dc24b900742fdaf1e077d9a3458fd865711de80bca95fe3c6d46610c532c6ef0 md5: fd312693df06da3578383232528c468d @@ -3690,24 +3718,24 @@ packages: purls: [] size: 134088 timestamp: 1754905959823 -- conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_0.conda - sha256: abe5ba0c956c5b830c237a5aaf50516ac9ebccf3f9fd9ffb18a5a11640f43677 - md5: f1f7cfc42b0fa6adb4c304d609077a78 +- conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_1.conda + sha256: 42f856c17ea4b9bce5ac5e91d6e58e15d835a3cac32d71bc592dd5031f9c0fb8 + md5: cec5c1ea565944a94f82cdd6fba7cc76 depends: - python - - __glibc >=2.17,<3.0.a0 - libstdcxx >=14 - libgcc >=14 + - __glibc >=2.17,<3.0.a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/kiwisolver?source=hash-mapping - size: 77278 - timestamp: 1754889408033 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.9-py312hdc12c9d_0.conda - sha256: 290d8f1016c9581bd4d2246bb21832ba4e4ba1c7b059eb9106d92bba561bccc7 - md5: 91384df8de4c340a1232793cf39a12ce + - pkg:pypi/kiwisolver?source=compressed-mapping + size: 77266 + timestamp: 1756467527669 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.9-py312hdc12c9d_1.conda + sha256: 3ad43b1e740a7bce1025a61d55a838eae6196f448f05a2f84447ec796d3148d9 + md5: 57697b25f636e864e62917dfaa9bfcba depends: - python - python 3.12.* *_cpython @@ -3718,8 +3746,8 @@ packages: license_family: BSD purls: - pkg:pypi/kiwisolver?source=hash-mapping - size: 67692 - timestamp: 1754889447292 + size: 67716 + timestamp: 1756467597403 - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda sha256: 99df692f7a8a5c27cd14b5fb1374ee55e756631b9c3d659ed3ee60830249b238 md5: 3f43953b7d3fb3aaa1d0d0723d91e368 @@ -4025,139 +4053,139 @@ packages: purls: [] size: 450755 timestamp: 1750864011299 
-- conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-34_h59b9bed_openblas.conda - build_number: 34 - sha256: 08a394ba934f68f102298259b150eb5c17a97c30c6da618e1baab4247366eab3 - md5: 064c22bac20fecf2a99838f9b979374c +- conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-35_h59b9bed_openblas.conda + build_number: 35 + sha256: 83d0755acd486660532003bc2562223504b57732bc7e250985391ce335692cf7 + md5: eaf80af526daf5745295d9964c2bd3cf depends: - libopenblas >=0.3.30,<0.3.31.0a0 - libopenblas >=0.3.30,<1.0a0 constrains: + - blas 2.135 openblas + - liblapack 3.9.0 35*_openblas + - libcblas 3.9.0 35*_openblas + - liblapacke 3.9.0 35*_openblas - mkl <2025 - - blas 2.134 openblas - - liblapacke 3.9.0 34*_openblas - - libcblas 3.9.0 34*_openblas - - liblapack 3.9.0 34*_openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 19306 - timestamp: 1754678416811 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-34_h10e41b3_openblas.conda - build_number: 34 - sha256: 5de3c3bfcdc8ba05da1a7815c9953fe392c2065d9efdc2491f91df6d0d1d9e76 - md5: cdb3e1ca1661dbf19f9aad7dad524996 + size: 16937 + timestamp: 1757002815691 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-35_h10e41b3_openblas.conda + build_number: 35 + sha256: 52ddab13634559d8fc9c7808c52200613bb3825c6e0708820f5744aa55324702 + md5: 0b59bf8bb0bc16b2c5bdae947a44e1f1 depends: - libopenblas >=0.3.30,<0.3.31.0a0 - libopenblas >=0.3.30,<1.0a0 constrains: - - blas 2.134 openblas + - libcblas 3.9.0 35*_openblas + - liblapacke 3.9.0 35*_openblas + - liblapack 3.9.0 35*_openblas - mkl <2025 - - liblapacke 3.9.0 34*_openblas - - libcblas 3.9.0 34*_openblas - - liblapack 3.9.0 34*_openblas + - blas 2.135 openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 19533 - timestamp: 1754678956963 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_3.conda - sha256: 
462a8ed6a7bb9c5af829ec4b90aab322f8bcd9d8987f793e6986ea873bbd05cf - md5: cb98af5db26e3f482bebb80ce9d947d3 + size: 17191 + timestamp: 1757003619794 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb03c661_4.conda + sha256: 2338a92d1de71f10c8cf70f7bb9775b0144a306d75c4812276749f54925612b6 + md5: 1d29d2e33fe59954af82ef54a8af3fe1 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 + - libgcc >=14 license: MIT license_family: MIT purls: [] - size: 69233 - timestamp: 1749230099545 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h5505292_3.conda - sha256: 0e9c196ad8569ca199ea05103707cde0ae3c7e97d0cdf0417d873148ea9ad640 - md5: fbc4d83775515e433ef22c058768b84d + size: 69333 + timestamp: 1756599354727 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h6caf38d_4.conda + sha256: 023b609ecc35bfee7935d65fcc5aba1a3ba6807cbba144a0730198c0914f7c79 + md5: 231cffe69d41716afe4525c5c1cc5ddd depends: - __osx >=11.0 license: MIT license_family: MIT purls: [] - size: 68972 - timestamp: 1749230317752 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_3.conda - sha256: 3eb27c1a589cbfd83731be7c3f19d6d679c7a444c3ba19db6ad8bf49172f3d83 - md5: 1c6eecffad553bde44c5238770cfb7da + size: 68938 + timestamp: 1756599687687 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb03c661_4.conda + sha256: fcec0d26f67741b122f0d5eff32f0393d7ebd3ee6bb866ae2f17f3425a850936 + md5: 5cb5a1c9a94a78f5b23684bcb845338d depends: - __glibc >=2.17,<3.0.a0 - - libbrotlicommon 1.1.0 hb9d3cd8_3 - - libgcc >=13 + - libbrotlicommon 1.1.0 hb03c661_4 + - libgcc >=14 license: MIT license_family: MIT purls: [] - size: 33148 - timestamp: 1749230111397 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h5505292_3.conda - sha256: d888c228e7d4f0f2303538f6a9705498c81d56fedaab7811e1186cb6e24d689b - md5: 01c4b35a1c4b94b60801f189f1ac6ee3 + size: 33406 + 
timestamp: 1756599364386 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h6caf38d_4.conda + sha256: 7f1cf83a00a494185fc087b00c355674a0f12e924b1b500d2c20519e98fdc064 + md5: cb7e7fe96c9eee23a464afd57648d2cd depends: - __osx >=11.0 - - libbrotlicommon 1.1.0 h5505292_3 + - libbrotlicommon 1.1.0 h6caf38d_4 license: MIT license_family: MIT purls: [] - size: 29249 - timestamp: 1749230338861 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_3.conda - sha256: 76e8492b0b0a0d222bfd6081cae30612aa9915e4309396fdca936528ccf314b7 - md5: 3facafe58f3858eb95527c7d3a3fc578 + size: 29015 + timestamp: 1756599708339 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb03c661_4.conda + sha256: d42c7f0afce21d5279a0d54ee9e64a2279d35a07a90e0c9545caae57d6d7dc57 + md5: 2e55011fa483edb8bfe3fd92e860cd79 depends: - __glibc >=2.17,<3.0.a0 - - libbrotlicommon 1.1.0 hb9d3cd8_3 - - libgcc >=13 + - libbrotlicommon 1.1.0 hb03c661_4 + - libgcc >=14 license: MIT license_family: MIT purls: [] - size: 282657 - timestamp: 1749230124839 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h5505292_3.conda - sha256: 0734a54db818ddfdfbf388fa53c5036a06bbe17de14005f33215d865d51d8a5e - md5: 1ce5e315293309b5bf6778037375fb08 + size: 289680 + timestamp: 1756599375485 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h6caf38d_4.conda + sha256: a2f2c1c2369360147c46f48124a3a17f5122e78543275ff9788dc91a1d5819dc + md5: 4ce5651ae5cd6eebc5899f9bfe0eac3c depends: - __osx >=11.0 - - libbrotlicommon 1.1.0 h5505292_3 + - libbrotlicommon 1.1.0 h6caf38d_4 license: MIT license_family: MIT purls: [] - size: 274404 - timestamp: 1749230355483 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-34_he106b2a_openblas.conda - build_number: 34 - sha256: edde454897c7889c0323216516abb570a593de728c585b14ef41eda2b08ddf3a - md5: 148b531b5457ad666ed76ceb4c766505 + size: 275791 + 
timestamp: 1756599724058 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-35_he106b2a_openblas.conda + build_number: 35 + sha256: 6f296f1567a7052c0f8b9527f74cfebc5418dbbae6dcdbae8659963f8ae7f48e + md5: e62d58d32431dabed236c860dfa566ca depends: - - libblas 3.9.0 34_h59b9bed_openblas + - libblas 3.9.0 35_h59b9bed_openblas constrains: - - liblapacke 3.9.0 34*_openblas - - blas 2.134 openblas - - liblapack 3.9.0 34*_openblas + - blas 2.135 openblas + - liblapack 3.9.0 35*_openblas + - liblapacke 3.9.0 35*_openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 19313 - timestamp: 1754678426220 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-34_hb3479ef_openblas.conda - build_number: 34 - sha256: 6639f6c6b2e76cb1be62cd6d9033bda7dc3fab2e5a80f5be4b5c522c27dcba17 - md5: e15018d609b8957c146dcb6c356dd50c + size: 16900 + timestamp: 1757002826333 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-35_hb3479ef_openblas.conda + build_number: 35 + sha256: dfd8dd4eea94894a01c1e4d9a409774ad2b71f77e713eb8c0dc62bb47abe2f0b + md5: 7f8a6b52d39bde47c6f2d3ef96bd5e68 depends: - - libblas 3.9.0 34_h10e41b3_openblas + - libblas 3.9.0 35_h10e41b3_openblas constrains: - - liblapack 3.9.0 34*_openblas - - blas 2.134 openblas - - liblapacke 3.9.0 34*_openblas + - liblapacke 3.9.0 35*_openblas + - blas 2.135 openblas + - liblapack 3.9.0 35*_openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 19521 - timestamp: 1754678970336 + size: 17163 + timestamp: 1757003634172 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 sha256: fd1d153962764433fe6233f34a72cdeed5dcf8a883a85769e8295ce940b5b0c5 md5: c965a5aa0d5c1c37ffc62dff36e28400 @@ -4212,16 +4240,16 @@ packages: purls: [] size: 403456 timestamp: 1749033320430 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-20.1.8-hf598326_1.conda - sha256: 
119b3ac75cb1ea29981e5053c2cb10d5f0b06fcc81b486cb7281f160daf673a1 - md5: a69ef3239d3268ef8602c7a7823fd982 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-21.1.0-hf598326_1.conda + sha256: 58427116dc1b58b13b48163808daa46aacccc2c79d40000f8a3582938876fed7 + md5: 0fb2c0c9b1c1259bc7db75c1342b1d99 depends: - __osx >=11.0 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 568267 - timestamp: 1752814881595 + size: 568692 + timestamp: 1756698505599 - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.24-h86f0d12_0.conda sha256: 8420748ea1cc5f18ecc5068b4f24c7a023cc9b20971c99c824ba10641fb95ddf md5: 64f0c503da58ec25ebd359e4d990afa8 @@ -4398,55 +4426,55 @@ packages: purls: [] size: 333529 timestamp: 1745370142848 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.1.0-h767d61c_4.conda - sha256: 144e35c1c2840f2dc202f6915fc41879c19eddbb8fa524e3ca4aa0d14018b26f - md5: f406dcbb2e7bef90d793e50e79a2882b +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.1.0-h767d61c_5.conda + sha256: 0caed73aac3966bfbf5710e06c728a24c6c138605121a3dacb2e03440e8baa6a + md5: 264fbfba7fb20acf3b29cde153e345ce depends: - __glibc >=2.17,<3.0.a0 - _openmp_mutex >=4.5 constrains: - - libgcc-ng ==15.1.0=*_4 - - libgomp 15.1.0 h767d61c_4 + - libgomp 15.1.0 h767d61c_5 + - libgcc-ng ==15.1.0=*_5 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 824153 - timestamp: 1753903866511 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.1.0-h69a702a_4.conda - sha256: 76ceac93ed98f208363d6e9c75011b0ff7b97b20f003f06461a619557e726637 - md5: 28771437ffcd9f3417c66012dc49a3be + size: 824191 + timestamp: 1757042543820 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.1.0-h69a702a_5.conda + sha256: f54bb9c3be12b24be327f4c1afccc2969712e0b091cdfbd1d763fb3e61cda03f + md5: 069afdf8ea72504e48d23ae1171d951c depends: - - libgcc 15.1.0 h767d61c_4 + - libgcc 15.1.0 
h767d61c_5 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 29249 - timestamp: 1753903872571 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.1.0-h69a702a_4.conda - sha256: 2fe41683928eb3c57066a60ec441e605a69ce703fc933d6d5167debfeba8a144 - md5: 53e876bc2d2648319e94c33c57b9ec74 + size: 29187 + timestamp: 1757042549554 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.1.0-h69a702a_5.conda + sha256: 4c1a526198d0d62441549fdfd668cc8e18e77609da1e545bdcc771dd8dc6a990 + md5: 0c91408b3dec0b97e8a3c694845bd63b depends: - - libgfortran5 15.1.0 hcea5267_4 + - libgfortran5 15.1.0 hcea5267_5 constrains: - - libgfortran-ng ==15.1.0=*_4 + - libgfortran-ng ==15.1.0=*_5 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 29246 - timestamp: 1753903898593 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.1.0-hfdf1602_0.conda - sha256: 9620b4ac9d32fe7eade02081cd60d6a359a927d42bb8e121bd16489acd3c4d8c - md5: e3b7dca2c631782ca1317a994dfe19ec + size: 29169 + timestamp: 1757042575979 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.1.0-hfdf1602_1.conda + sha256: 981e3fac416e80b007a2798d6c1d4357ebebeb72a039aca1fb3a7effe9dcae86 + md5: c98207b6e2b1a309abab696d229f163e depends: - - libgfortran5 15.1.0 hb74de2c_0 + - libgfortran5 15.1.0 hb74de2c_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 133859 - timestamp: 1750183546047 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.1.0-hcea5267_4.conda - sha256: 3070e5e2681f7f2fb7af0a81b92213f9ab430838900da8b4f9b8cf998ddbdd84 - md5: 8a4ab7ff06e4db0be22485332666da0f + size: 134383 + timestamp: 1756239485494 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.1.0-hcea5267_5.conda + sha256: 9d06adc6d8e8187ddc1cad87525c690bc8202d8cb06c13b76ab2fc80a35ed565 + md5: fbd4008644add05032b6764807ee2cba depends: - __glibc 
>=2.17,<3.0.a0 - libgcc >=15.1.0 @@ -4455,11 +4483,11 @@ packages: license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 1564595 - timestamp: 1753903882088 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.1.0-hb74de2c_0.conda - sha256: 44b8ce4536cc9a0e59c09ff404ef1b0120d6a91afc32799331d85268cbe42438 - md5: 8b158ccccd67a40218e12626a39065a1 + size: 1564589 + timestamp: 1757042559498 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.1.0-hb74de2c_1.conda + sha256: 1f8f5b2fdd0d2559d0f3bade8da8f57e9ee9b54685bd6081c6d6d9a2b0239b41 + md5: 4281bd1c654cb4f5cab6392b3330451f depends: - llvm-openmp >=8.0.0 constrains: @@ -4467,18 +4495,18 @@ packages: license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 758352 - timestamp: 1750182604206 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.1.0-h767d61c_4.conda - sha256: e0487a8fec78802ac04da0ac1139c3510992bc58a58cde66619dde3b363c2933 - md5: 3baf8976c96134738bba224e9ef6b1e5 + size: 759679 + timestamp: 1756238772083 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.1.0-h767d61c_5.conda + sha256: 125051d51a8c04694d0830f6343af78b556dd88cc249dfec5a97703ebfb1832d + md5: dcd5ff1940cd38f6df777cac86819d60 depends: - __glibc >=2.17,<3.0.a0 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 447289 - timestamp: 1753903801049 + size: 447215 + timestamp: 1757042483384 - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.36.0-hc4361e1_1.conda sha256: 3a56c653231d6233de5853dc01f07afad6a332799a39c3772c0948d2e68547e4 md5: ae36e6296a8dd8e8a9a8375965bf6398 @@ -4638,36 +4666,36 @@ packages: purls: [] size: 553624 timestamp: 1745268405713 -- conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-34_h7ac8fdf_openblas.conda - build_number: 34 - sha256: 9c941d5da239f614b53065bc5f8a705899326c60c9f349d9fbd7bd78298f13ab - md5: 
f05a31377b4d9a8d8740f47d1e70b70e +- conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-35_h7ac8fdf_openblas.conda + build_number: 35 + sha256: 3967e62d4d1d5c1492f861864afca95aaa8cac14e696ce7b9be7d0b6a50507e8 + md5: 88fa5489509c1da59ab2ee6b234511a5 depends: - - libblas 3.9.0 34_h59b9bed_openblas + - libblas 3.9.0 35_h59b9bed_openblas constrains: - - liblapacke 3.9.0 34*_openblas - - libcblas 3.9.0 34*_openblas - - blas 2.134 openblas + - blas 2.135 openblas + - libcblas 3.9.0 35*_openblas + - liblapacke 3.9.0 35*_openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 19324 - timestamp: 1754678435277 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-34_hc9a63f6_openblas.conda - build_number: 34 - sha256: 659c7cc2d7104c5fa33482d28a6ce085fd116ff5625a117b7dd45a3521bf8efc - md5: 94b13d05122e301de02842d021eea5fb + size: 16920 + timestamp: 1757002835750 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-35_hc9a63f6_openblas.conda + build_number: 35 + sha256: 84f1c11187b564a9fdf464dad46d436ade966262e3000f7c5037b56b244f6fb8 + md5: 437d6c679b3d959d87b3b735fcc0b4ee depends: - - libblas 3.9.0 34_h10e41b3_openblas + - libblas 3.9.0 35_h10e41b3_openblas constrains: - - libcblas 3.9.0 34*_openblas - - blas 2.134 openblas - - liblapacke 3.9.0 34*_openblas + - liblapacke 3.9.0 35*_openblas + - libcblas 3.9.0 35*_openblas + - blas 2.135 openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 19532 - timestamp: 1754678979401 + size: 17166 + timestamp: 1757003647724 - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda sha256: f2591c0069447bbe28d4d696b7fcb0c5bd0b4ac582769b89addbcf26fb3430d8 md5: 1a580f7796c7bf6393fddb8bbbde58dc @@ -4691,39 +4719,39 @@ packages: purls: [] size: 92286 timestamp: 1749230283517 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda - sha256: 
b0f2b3695b13a989f75d8fd7f4778e1c7aabe3b36db83f0fe80b2cd812c0e975 - md5: 19e57602824042dfd0446292ef90488b +- conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda + sha256: a4a7dab8db4dc81c736e9a9b42bdfd97b087816e029e221380511960ac46c690 + md5: b499ce4b026493a13774bcf0f4c33849 depends: - __glibc >=2.17,<3.0.a0 - - c-ares >=1.32.3,<2.0a0 + - c-ares >=1.34.5,<2.0a0 - libev >=4.33,<4.34.0a0 - libev >=4.33,<5.0a0 - - libgcc >=13 - - libstdcxx >=13 + - libgcc >=14 + - libstdcxx >=14 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 + - openssl >=3.5.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 647599 - timestamp: 1729571887612 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.64.0-h6d7220d_0.conda - sha256: 00cc685824f39f51be5233b54e19f45abd60de5d8847f1a56906f8936648b72f - md5: 3408c02539cee5f1141f9f11450b6a51 + size: 666600 + timestamp: 1756834976695 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.67.0-hc438710_0.conda + sha256: a07cb53b5ffa2d5a18afc6fd5a526a5a53dd9523fbc022148bd2f9395697c46d + md5: a4b4dd73c67df470d091312ab87bf6ae depends: - __osx >=11.0 - - c-ares >=1.34.2,<2.0a0 - - libcxx >=17 + - c-ares >=1.34.5,<2.0a0 + - libcxx >=19 - libev >=4.33,<4.34.0a0 - libev >=4.33,<5.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 + - openssl >=3.5.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 566719 - timestamp: 1729572385640 + size: 575454 + timestamp: 1756835746393 - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda sha256: 927fe72b054277cde6cb82597d0fcf6baf127dcbce2e0a9d8925a68f1265eef5 md5: d864d34357c3b65a4b731f78c0801dc4 @@ -4997,27 +5025,27 @@ packages: purls: [] size: 279193 timestamp: 1745608793272 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.1.0-h8f9b012_4.conda - sha256: b5b239e5fca53ff90669af1686c86282c970dd8204ebf477cf679872eb6d48ac - md5: 3c376af8888c386b9d3d1c2701e2f3ab +- 
conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.1.0-h8f9b012_5.conda + sha256: 0f5f61cab229b6043541c13538d75ce11bd96fb2db76f94ecf81997b1fde6408 + md5: 4e02a49aaa9d5190cb630fa43528fbe6 depends: - __glibc >=2.17,<3.0.a0 - - libgcc 15.1.0 h767d61c_4 + - libgcc 15.1.0 h767d61c_5 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 3903453 - timestamp: 1753903894186 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.1.0-h4852527_4.conda - sha256: 81c841c1cf4c0d06414aaa38a249f9fdd390554943065c3a0b18a9fb7e8cc495 - md5: 2d34729cbc1da0ec988e57b13b712067 + size: 3896432 + timestamp: 1757042571458 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.1.0-h4852527_5.conda + sha256: 7b8cabbf0ab4fe3581ca28fe8ca319f964078578a51dd2ca3f703c1d21ba23ff + md5: 8bba50c7f4679f08c861b597ad2bda6b depends: - - libstdcxx 15.1.0 h8f9b012_4 + - libstdcxx 15.1.0 h8f9b012_5 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 29317 - timestamp: 1753903924491 + size: 29233 + timestamp: 1757042603319 - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda sha256: ebb395232973c18745b86c9a399a4725b2c39293c9a91b8e59251be013db42f0 md5: dcb95c0a98ba9ff737f7ae482aef7833 @@ -5251,26 +5279,26 @@ packages: - pkg:pypi/lineax?source=hash-mapping size: 48045 timestamp: 1734897369744 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-20.1.8-hbb9b287_1.conda - sha256: e56f46b253dd1a99cc01dde038daba7789fc6ed35b2a93e3fc44b8578a82b3ec - md5: a10bdc3e5d9e4c1ce554c83855dff6c4 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-21.1.0-hbb9b287_0.conda + sha256: c6750073a128376a14bedacfa90caab4c17025c9687fcf6f96e863b28d543af4 + md5: e57d95fec6eaa747e583323cba6cfe5c depends: - __osx >=11.0 constrains: - - openmp 20.1.8|20.1.8.* - intel-openmp <0.0a0 + - openmp 21.1.0|21.1.0.* license: Apache-2.0 WITH LLVM-exception 
license_family: APACHE purls: [] - size: 283300 - timestamp: 1753978829840 -- conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312h374181b_1.conda - sha256: 1fff6550e0adaaf49dd844038b6034657de507ca50ac695e22284898e8c1e2c2 - md5: 146d3cc72c65fdac198c09effb6ad133 + size: 286039 + timestamp: 1756673290280 +- conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312he100287_2.conda + sha256: 254102ea2e878ddccd4e7b6468cf0d65d6be52242f7b009dbde299c8e58f1842 + md5: 36676f8daca4611c7566837b838695b9 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 + - libgcc >=14 + - libstdcxx >=14 - libzlib >=1.3.1,<2.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -5278,14 +5306,14 @@ packages: license_family: BSD purls: - pkg:pypi/llvmlite?source=hash-mapping - size: 29996918 - timestamp: 1742815908291 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312h728bc31_1.conda - sha256: a8c486e6094863fdcd0ddf6f8e53d25b604c3b246bd70c26aaee42336c059de0 - md5: dfb6368d07cc87bc9ca88e50d8c957eb + size: 29999586 + timestamp: 1756303919897 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312hc9b382d_2.conda + sha256: 7d4f44686411d569119df4c340292f4cdb9b27a5040caaea95cd570282bf14be + md5: 768549d1e202865d3933d28d38fea5e3 depends: - __osx >=11.0 - - libcxx >=18 + - libcxx >=19 - libzlib >=1.3.1,<2.0a0 - python >=3.12,<3.13.0a0 - python >=3.12,<3.13.0a0 *_cpython @@ -5294,8 +5322,8 @@ packages: license_family: BSD purls: - pkg:pypi/llvmlite?source=hash-mapping - size: 18899919 - timestamp: 1742816321457 + size: 18895658 + timestamp: 1756304209362 - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 sha256: 9afe0b5cfa418e8bdb30d8917c5a6cec10372b037924916f1f85b9f4899a67a6 md5: 91e27ef3d05cc772ce627e51cff111c4 @@ -5307,24 +5335,24 @@ packages: - pkg:pypi/locket?source=hash-mapping size: 8250 timestamp: 1650660473123 -- conda: 
https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.4-py312hf0f0c11_0.conda - sha256: a04aff570a27173eea3a2b515b4794ce20e058b658f642475f72ccc1f6d88cff - md5: f770ae71fc1800e7a735a7b452c0ab81 +- conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.4-py312h5d89b6d_1.conda + sha256: 672bd94e67feff49461b7eb7a3ca08100681ebf76456e1f98fa0f08b17a04d2e + md5: bdcd58b62f85ad9554b5b6b5020683b8 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 + - libgcc >=14 - lz4-c >=1.10.0,<1.11.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/lz4?source=hash-mapping - size: 40315 - timestamp: 1746562078119 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.4-py312hf263c89_0.conda - sha256: 265cd74fdace1106dbaf395bcf8d1cc2b1d20f998ff0694451f2a91f9804c7d8 - md5: be22e508c6268b4c7b7b147845deb7f5 + - pkg:pypi/lz4?source=compressed-mapping + size: 40272 + timestamp: 1756752102787 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.4-py312hb64cbc0_1.conda + sha256: 0239fb52b2d9e0d9e2e38653c1e01c7524ca86ca7d497e714d770040676f11b0 + md5: bb2585a3de046f966d4ec64694b3b3df depends: - __osx >=11.0 - lz4-c >=1.10.0,<1.11.0a0 @@ -5335,8 +5363,8 @@ packages: license_family: BSD purls: - pkg:pypi/lz4?source=hash-mapping - size: 106981 - timestamp: 1746562316986 + size: 106834 + timestamp: 1756752258937 - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda sha256: 47326f811392a5fd3055f0f773036c392d26fdb32e4d8e7a8197eed951489346 md5: 9de5350a85c4a20c685259b889aa6393 @@ -5404,9 +5432,9 @@ packages: - pkg:pypi/markupsafe?source=hash-mapping size: 24048 timestamp: 1733219945697 -- conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.5-py312he3d6523_0.conda - sha256: 66e94e6226fd3dd04bb89d04079e2d8e2c74d923c0bbf255e483f127aee621ff - md5: 9246288e5ef2a944f7c9c648f9f331c7 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.6-py312he3d6523_1.conda + sha256: 9af1c0e8a9551edfb1fbee0595a00108204af3d34c1680271b0121846dc21e77 + md5: 94926ee1d68e678fb4cfdb0727a0927e depends: - __glibc >=2.17,<3.0.a0 - contourpy >=1.0.1 @@ -5431,12 +5459,12 @@ packages: license: PSF-2.0 license_family: PSF purls: - - pkg:pypi/matplotlib?source=compressed-mapping - size: 8071030 - timestamp: 1754005868258 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.5-py312h05635fa_0.conda - sha256: bc44413a9f1984e6ab39bd0b805430a4e11e41e1d0389254c4d2d056be610512 - md5: 96e5de8c96b4557430f6af0d6693d4c9 + - pkg:pypi/matplotlib?source=hash-mapping + size: 8250974 + timestamp: 1756869718533 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.6-py312h605b88b_1.conda + sha256: 9d55cdf55760552e42cfd0bc867f6902754aa2aeb4f661cee715a27e447b4886 + md5: 63773c3db15b238aaa49b34a27cdee9b depends: - __osx >=11.0 - contourpy >=1.0.1 @@ -5460,9 +5488,9 @@ packages: license: PSF-2.0 license_family: PSF purls: - - pkg:pypi/matplotlib?source=compressed-mapping - size: 8031746 - timestamp: 1754005848626 + - pkg:pypi/matplotlib?source=hash-mapping + size: 8215007 + timestamp: 1756870267276 - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda sha256: 69b7dc7131703d3d60da9b0faa6dd8acbf6f6c396224cf6aef3e855b8c0c41c6 md5: af6ab708897df59bd6e7283ceab1b56b @@ -5486,52 +5514,52 @@ packages: - pkg:pypi/mdurl?source=hash-mapping size: 14465 timestamp: 1733255681319 -- conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.3-pyh29332c3_0.conda - sha256: a67484d7dd11e815a81786580f18b6e4aa2392f292f29183631a6eccc8dc37b3 - md5: 7ec6576e328bc128f4982cd646eeba85 +- conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.4-pyhcf101f3_0.conda + sha256: 609ea628ace5c6cdbdce772704e6cb159ead26969bb2f386ca1757632b0f74c6 + md5: f5a4d548d1d3bdd517260409fc21e205 depends: - - 
python >=3.9 + - python >=3.10 - typing_extensions - python license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/mistune?source=hash-mapping - size: 72749 - timestamp: 1742402716323 -- conda: https://conda.anaconda.org/conda-forge/linux-64/ml_dtypes-0.5.1-py312hf9745cd_0.conda - sha256: 87928a36d350c470455a322c4c2b82266b88322d0fd5187ae8cc6fb5e3aad61f - md5: c45ac8395a27736c27b2e50b53ffe62c + size: 72996 + timestamp: 1756495311698 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ml_dtypes-0.5.1-py312hf79963d_1.conda + sha256: 973b99bc7f54698875b1b075ae515e407cf0c2fe7776668f009862bc3fa5f8c2 + md5: ea06beae50d783cc5741f7b221a7444c depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - numpy >=1.19,<3 + - libgcc >=14 + - libstdcxx >=14 + - numpy >=1.23,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MPL-2.0 AND Apache-2.0 purls: - pkg:pypi/ml-dtypes?source=hash-mapping - size: 290991 - timestamp: 1736538940686 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ml_dtypes-0.5.1-py312hcb1e3ce_0.conda - sha256: 17f70a0f345722e67f7437895a78cce84b758419f1c373186cec671607270747 - md5: d7a33fc18bf71480224e069be3072bbf + size: 293495 + timestamp: 1756742390741 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ml_dtypes-0.5.1-py312h98f7732_1.conda + sha256: 7feec9d78e66607f61ebce14dcb670001414ce9e6b4db8cecc5ac294daf323be + md5: c87e654e2417d4b2de8b148b4c993ff6 depends: - __osx >=11.0 - - libcxx >=18 - - numpy >=1.19,<3 + - libcxx >=19 + - numpy >=1.23,<3 - python >=3.12,<3.13.0a0 - python >=3.12,<3.13.0a0 *_cpython - python_abi 3.12.* *_cp312 license: MPL-2.0 AND Apache-2.0 purls: - pkg:pypi/ml-dtypes?source=hash-mapping - size: 200130 - timestamp: 1736539205286 -- conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.0-pyhd8ed1ab_0.conda - sha256: 25062ab9e7d77334c843b43e895da3f13f9fccc8bf07fe043ee5fdbda063e337 - md5: 012edca20f48cfc8d92199777dcd7a90 + size: 205416 + timestamp: 1756742744061 +- 
conda: https://conda.anaconda.org/conda-forge/noarch/mplhep-0.4.1-pyhd8ed1ab_0.conda + sha256: 8cc30255a5eda612db4fa6b7eb998a34432213c7a2b62c06773a88adb26ea4f0 + md5: 2969d22d9c6ca07d5087ce9fbbb1d39b depends: - matplotlib-base >=3.4 - mplhep_data >=0.0.4 @@ -5543,8 +5571,8 @@ packages: license_family: MIT purls: - pkg:pypi/mplhep?source=hash-mapping - size: 45836 - timestamp: 1751407321164 + size: 47103 + timestamp: 1756231131892 - conda: https://conda.anaconda.org/conda-forge/noarch/mplhep_data-0.0.4-pyhd8ed1ab_2.conda sha256: 6d2ca813c319e53248b91980fd1ed10e2368c0a80421b6964c498158c24b3e6e md5: 5bd1e05f702e5928d6405dd33d879d0b @@ -5555,27 +5583,27 @@ packages: - pkg:pypi/mplhep-data?source=hash-mapping size: 5816472 timestamp: 1734632891284 -- conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.1-py312h68727a3_0.conda - sha256: 969b8e50922b592228390c25ac417c0761fd6f98fccad870ac5cc84f35da301a - md5: 6998b34027ecc577efe4e42f4b022a98 +- conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.1-py312hd9148b4_1.conda + sha256: 5c1a49c4afecfc7c542760711e8075cb8115997c47f52b7af0fc554f6f260b5c + md5: f81ef4109d77d92188bdc25712c0ff17 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 + - libgcc >=14 + - libstdcxx >=14 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: Apache purls: - pkg:pypi/msgpack?source=hash-mapping - size: 102924 - timestamp: 1749813333354 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.1-py312hb23fbb9_0.conda - sha256: 0cdc5fcdb75727a13cbcfc49e00b0fddf6705c7bd908aee1dd1e7a869de8dfe9 - md5: 4ae8111ba5af53e50cb6f9d1705c408c + size: 103174 + timestamp: 1756678658638 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.1-py312ha0dd364_1.conda + sha256: e2faf46924ecf94aebdaa78c093b0b2c05fa72f06be2d311e26fb5fee89c3ba7 + md5: feaa731094db962528c2f51e24370597 depends: - __osx >=11.0 - - libcxx >=18 + - 
libcxx >=19 - python >=3.12,<3.13.0a0 - python >=3.12,<3.13.0a0 *_cpython - python_abi 3.12.* *_cp312 @@ -5583,8 +5611,8 @@ packages: license_family: Apache purls: - pkg:pypi/msgpack?source=hash-mapping - size: 91155 - timestamp: 1749813638452 + size: 91520 + timestamp: 1756678855205 - conda: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.6.3-py312h178313f_0.conda sha256: c703d148a85ffb4f11001d31b7c4c686a46ad554eeeaa02c69da59fbf0e00dbb md5: f4e246ec4ccdf73e50eefb0fa359a64e @@ -5624,18 +5652,18 @@ packages: - pkg:pypi/munkres?source=hash-mapping size: 15851 timestamp: 1749895533014 -- conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.1.2-pyhe01879c_0.conda - sha256: 54c58f45029b79a1fec25dc6f6179879afa4dddb73e5c38c85e574f66bb1d930 - md5: 90d3b6c75c144e8c461b846410d7c0bf +- conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.3.0-pyhcf101f3_0.conda + sha256: 8877c4bc67b9578766f905139e510ac8669d7200f0e3adf0b4e04d7ecc214c15 + md5: ae268cbf8676bb70014132fc9dd1a0e3 depends: - - python >=3.9 + - python >=3.10 - python license: MIT license_family: MIT purls: - pkg:pypi/narwhals?source=hash-mapping - size: 243121 - timestamp: 1755254908603 + size: 251171 + timestamp: 1756741653794 - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda sha256: a20cff739d66c2f89f413e4ba4c6f6b59c50d5c30b5f0d840c13e8c9c2df9135 md5: 6bb0d77277061742744176ab555b723c @@ -6149,17 +6177,18 @@ packages: - pkg:pypi/pandocfilters?source=hash-mapping size: 11627 timestamp: 1631603397334 -- conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda - sha256: 17131120c10401a99205fc6fe436e7903c0fa092f1b3e80452927ab377239bcc - md5: 5c092057b6badd30f75b06244ecd01c9 +- conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.5-pyhcf101f3_0.conda + sha256: 30de7b4d15fbe53ffe052feccde31223a236dae0495bab54ab2479de30b2990f + md5: a110716cdb11cf51482ff4000dc253d7 depends: - - python >=3.9 + - python >=3.10 + - 
python license: MIT license_family: MIT purls: - pkg:pypi/parso?source=hash-mapping - size: 75295 - timestamp: 1733271352153 + size: 81562 + timestamp: 1755974222274 - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda sha256: 472fc587c63ec4f6eba0cc0b06008a6371e0a08a5986de3cf4e8024a47b4fe6c md5: 0badf9c54e24cecfb0ad2f99d680c163 @@ -6195,18 +6224,18 @@ packages: - pkg:pypi/pickleshare?source=hash-mapping size: 11748 timestamp: 1733327448200 -- conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py312h80c1187_0.conda - sha256: 7c9a8f65a200587bf7a0135ca476f9c472348177338ed8b825ddcc08773fde68 - md5: 7911e727a6c24db662193a960b81b6b2 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py312h0e488c8_1.conda + sha256: 27248be068e1ad8be4872e49d89f22904bb578e2a5bafcb2bfffef7da544f242 + md5: 6b8bc565d829a9e46178da86e05e19d0 depends: - __glibc >=2.17,<3.0.a0 - lcms2 >=2.17,<3.0a0 - libfreetype >=2.13.3 - libfreetype6 >=2.13.3 - - libgcc >=13 + - libgcc >=14 - libjpeg-turbo >=3.1.0,<4.0a0 - libtiff >=4.7.0,<4.8.0a0 - - libwebp-base >=1.5.0,<2.0a0 + - libwebp-base >=1.6.0,<2.0a0 - libxcb >=1.17.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.3,<3.0a0 @@ -6215,12 +6244,12 @@ packages: - tk >=8.6.13,<8.7.0a0 license: HPND purls: - - pkg:pypi/pillow?source=hash-mapping - size: 42964111 - timestamp: 1751482158083 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.3.0-py312h50aef2c_0.conda - sha256: 3d60288e8cfd42e4548c9e5192a285e73f81df2869f69b9d3905849b45d9bd2a - md5: dddff48655b5cd24a5170a6df979943a + - pkg:pypi/pillow?source=compressed-mapping + size: 42228308 + timestamp: 1756853546193 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.3.0-py312hce42e9c_1.conda + sha256: 01c6a1cca59bac7bb9c22bfbc04806246abd4833d9b2ba7f4087935099f69211 + md5: 2f34c31cdeb63c63316484b268b8500e depends: - __osx >=11.0 - lcms2 >=2.17,<3.0a0 @@ -6228,7 +6257,7 @@ packages: - libfreetype6 
>=2.13.3 - libjpeg-turbo >=3.1.0,<4.0a0 - libtiff >=4.7.0,<4.8.0a0 - - libwebp-base >=1.5.0,<2.0a0 + - libwebp-base >=1.6.0,<2.0a0 - libxcb >=1.17.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.3,<3.0a0 @@ -6239,20 +6268,20 @@ packages: license: HPND purls: - pkg:pypi/pillow?source=hash-mapping - size: 42514714 - timestamp: 1751482419501 -- conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.8-pyhe01879c_0.conda - sha256: 0f48999a28019c329cd3f6fd2f01f09fc32cc832f7d6bbe38087ddac858feaa3 - md5: 424844562f5d337077b445ec6b1398a7 + size: 41849213 + timestamp: 1756853810909 +- conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.4.0-pyhcf101f3_0.conda + sha256: dfe0fa6e351d2b0cef95ac1a1533d4f960d3992f9e0f82aeb5ec3623a699896b + md5: cc9d9a3929503785403dbfad9f707145 depends: - - python >=3.9 + - python >=3.10 - python license: MIT license_family: MIT purls: - - pkg:pypi/platformdirs?source=hash-mapping - size: 23531 - timestamp: 1746710438805 + - pkg:pypi/platformdirs?source=compressed-mapping + size: 23653 + timestamp: 1756227402815 - conda: https://conda.anaconda.org/conda-forge/linux-64/prometheus-cpp-1.3.0-ha5d0236_0.conda sha256: 013669433eb447548f21c3c6b16b2ed64356f726b5f77c1b39d5ba17a8a4b8bc md5: a83f6a2fdc079e643237887a37460668 @@ -6293,20 +6322,20 @@ packages: - pkg:pypi/prometheus-client?source=hash-mapping size: 52641 timestamp: 1748896836631 -- conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.51-pyha770c72_0.conda - sha256: ebc1bb62ac612af6d40667da266ff723662394c0ca78935340a5b5c14831227b - md5: d17ae9db4dc594267181bd199bf9a551 +- conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + sha256: 4817651a276016f3838957bfdf963386438c70761e9faec7749d411635979bae + md5: edb16f14d920fb3faf17f5ce582942d6 depends: - - python >=3.9 + - python >=3.10 - wcwidth constrains: - - prompt_toolkit 3.0.51 + - prompt_toolkit 3.0.52 license: BSD-3-Clause license_family: BSD purls: - 
pkg:pypi/prompt-toolkit?source=hash-mapping - size: 271841 - timestamp: 1744724188108 + size: 273927 + timestamp: 1756321848365 - conda: https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py312h178313f_0.conda sha256: d0ff67d89cf379a9f0367f563320621f0bc3969fe7f5c85e020f437de0927bb4 md5: 0cf580c1b73146bb9ff1bbdb4d4c8cf9 @@ -6571,9 +6600,9 @@ packages: - pkg:pypi/pyhf?source=hash-mapping size: 120909 timestamp: 1740702170788 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-11.1-py312h4c66426_0.conda - sha256: d4b1ae7f925720c1a6643c03199c6a47ba6a536bfd630f522baa5fe6ebf4a786 - md5: 02247b8a9ba52a15a53edd6d4cf9dac4 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-11.1-py312h4c66426_1.conda + sha256: d681491154780f106cdecda4c03b2952ced102013a9c1631d21baa6500e9d335 + md5: 443e404c3d27e6c7925dd5e4a9672c5e depends: - __osx >=11.0 - libffi >=3.4.6,<3.5.0a0 @@ -6584,12 +6613,12 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/pyobjc-core?source=hash-mapping - size: 474838 - timestamp: 1750207878592 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-11.1-py312hb9d441b_0.conda - sha256: a6f262fe5706c73dce7ca7fbec9a055fc225422ad8d7fc45dd66ad9dddb0afe3 - md5: 5b7a58b273bca2c67dd8ddaea92e404e + - pkg:pypi/pyobjc-core?source=compressed-mapping + size: 474222 + timestamp: 1756813261461 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-11.1-py312h3964663_1.conda + sha256: f4f6ef1261080336fa3c37cb1a853fcd5119845f8a91d3f3cadec40ea39dbbd6 + md5: bf1e3e9543301d705427a763a92eb2ef depends: - __osx >=11.0 - libffi >=3.4.6,<3.5.0a0 @@ -6601,8 +6630,8 @@ packages: license_family: MIT purls: - pkg:pypi/pyobjc-framework-cocoa?source=hash-mapping - size: 386128 - timestamp: 1750225477437 + size: 382439 + timestamp: 1756824097019 - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.3-pyhe01879c_2.conda sha256: 
afe32182b1090911b64ac0f29eb47e03a015d142833d8a917defd65d91c99b74 md5: aa0028616c0750c773698fdc254b2b8d @@ -6701,6 +6730,16 @@ packages: - pkg:pypi/fastjsonschema?source=hash-mapping size: 244628 timestamp: 1755304154927 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-gil-3.12.11-hd8ed1ab_0.conda + sha256: b8afeaefe409d61fa4b68513b25a66bb17f3ca430d67cfea51083c7bfbe098ef + md5: 859c6bec94cd74119f12b961aba965a8 + depends: + - cpython 3.12.11.* + - python_abi * *_cp312 + license: Python-2.0 + purls: [] + size: 45836 + timestamp: 1749047798827 - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda sha256: 4790787fe1f4e8da616edca4acf6a4f8ed4e7c6967aa31b920208fc8f95efcca md5: a61bf9ec79426938ff785eb69dbb1960 @@ -6805,40 +6844,39 @@ packages: - pkg:pypi/pyyaml?source=hash-mapping size: 192148 timestamp: 1737454886351 -- conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-27.0.2-py312h6748674_0.conda - sha256: d697fb7e36427b085feffd63288365be543f7c2a779e35205cb1e52d1ca49957 - md5: e0770749ec419e8e68e71716507c1be4 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-27.0.2-py312hfb55c3c_3.conda + noarch: python + sha256: b129a83b432c6302b10be57205f0ed47682575720a6c04f342e925872636a3ef + md5: 22a9ef9d4c242cc03376844ed54e1756 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libsodium >=1.0.20,<1.0.21.0a0 + - python - libstdcxx >=14 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - _python_abi3_support 1.* + - cpython >=3.12 - zeromq >=4.3.5,<4.4.0a0 license: BSD-3-Clause - license_family: BSD purls: - - pkg:pypi/pyzmq?source=compressed-mapping - size: 381481 - timestamp: 1755799909607 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyzmq-27.0.2-py312h211b278_0.conda - sha256: b958515ddde46cd2301df2e10236087222bf0cb740f0298f12a6dc94ed9635f3 - md5: a4a61ef89d6d5a2c2c4b3acd6bf338b4 + - pkg:pypi/pyzmq?source=hash-mapping + size: 
211830 + timestamp: 1757116192543 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyzmq-27.0.2-py312hd65ceae_3.conda + noarch: python + sha256: 165c6bb07a03c03a2e49e44ec0ae93e232c8713ae2174dd63173123a4f904a29 + md5: 8c630a2acaf139386e9d9eb8d5da0d12 depends: - - __osx >=11.0 + - python - libcxx >=19 - - libsodium >=1.0.20,<1.0.21.0a0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 + - __osx >=11.0 + - _python_abi3_support 1.* + - cpython >=3.12 - zeromq >=4.3.5,<4.4.0a0 license: BSD-3-Clause - license_family: BSD purls: - pkg:pypi/pyzmq?source=hash-mapping - size: 358689 - timestamp: 1755799848254 + size: 190675 + timestamp: 1757116278310 - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda sha256: 776363493bad83308ba30bcb88c2552632581b143e8ee25b1982c8c743e73abc md5: 353823361b1d27eb3960efb076dfcaf6 @@ -7004,9 +7042,9 @@ packages: - pkg:pypi/rich?source=hash-mapping size: 201098 timestamp: 1753436991345 -- conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.27.0-py312h868fb18_0.conda - sha256: cfc9c79f0e2658754b02efb890fe3c835d865ed0535155787815ae16e56dbe9c - md5: 3d3d11430ec826a845a0e9d6ccefa294 +- conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.27.1-py312h868fb18_1.conda + sha256: 76efba673e02d4d47bc2de6e48a8787ed98bae4933233dee5ce810fa3de6ef2b + md5: 0e32f9c8ca00c1b926a1b77be6937112 depends: - python - __glibc >=2.17,<3.0.a0 @@ -7017,12 +7055,12 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/rpds-py?source=compressed-mapping - size: 388899 - timestamp: 1754570135763 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.27.0-py312h6f58b40_0.conda - sha256: 0a14b856d41b4ef51a4c67fd8200b18c1c21ba0f252a2e3f9f85678149e08141 - md5: ccbe846733e149a842df80f53f66ca72 + - pkg:pypi/rpds-py?source=hash-mapping + size: 389483 + timestamp: 1756737801011 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.27.1-py312h6f58b40_1.conda + sha256: 6a7d5d862f90a1d594213d2be34598988e34dd19040fd865dcd60c4fca023cbc + md5: ba21b22f398ede2df8c35f88a967be97 depends: - python - __osx >=11.0 @@ -7033,9 +7071,9 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/rpds-py?source=compressed-mapping - size: 357078 - timestamp: 1754569997063 + - pkg:pypi/rpds-py?source=hash-mapping + size: 355109 + timestamp: 1756737521820 - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.22-h96f233e_0.conda sha256: 12dc8ff959fbf28384fdfd8946a71bdfa77ec84f40dcd0ca5a4ae02a652583ca md5: 2f6fc0cf7cd248a32a52d7c8609d93a9 @@ -7090,9 +7128,9 @@ packages: - pkg:pypi/scikit-learn?source=hash-mapping size: 8931629 timestamp: 1752826246695 -- conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.1-py312h4ebe9ca_0.conda - sha256: 988c9fb07058639c3ff6d8e1171a11dbd64bcc14d5b2dfe3039b610f6667b316 - md5: b01bd2fd775d142ead214687b793d20d +- conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.1-py312h7a1785b_1.conda + sha256: d5e65c845446ae64ad55b2ee0571f29d1ac39c8ced36e9f5a04d2d105e61fab9 + md5: b965f164d14d4cffe1ddcf39195b63d6 depends: - __glibc >=2.17,<3.0.a0 - libblas >=3.9.0,<4.0a0 @@ -7111,11 +7149,11 @@ packages: license_family: BSD purls: - pkg:pypi/scipy?source=hash-mapping - size: 17190354 - timestamp: 1754970575489 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.16.1-py312h286a95b_0.conda - sha256: 2d9d0173b58010c2ee09280b7e4fa185d191380a4f042698263b4ffa2671818b - md5: 9841d229c34dbca6fd039e76cfca307b + size: 17014751 + timestamp: 1756530097597 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.16.1-py312h6e75237_1.conda + sha256: 005096a5a5731c61484e1a901a11f082e83d92ad13588a59bea186be9f41bb85 + md5: a1f1ef4fafc37c93d4f77947d2b5e5d2 depends: - __osx >=11.0 - libblas >=3.9.0,<4.0a0 @@ -7135,8 +7173,8 @@ packages: license_family: BSD purls: - 
pkg:pypi/scipy?source=hash-mapping - size: 13840981 - timestamp: 1754970654942 + size: 13773643 + timestamp: 1756530081074 - conda: https://conda.anaconda.org/conda-forge/linux-64/scitokens-cpp-1.1.3-h6ac2c77_0.conda sha256: 44e6a7c1e3c1ea78614644cda41af6df2f9800dc8c7d313729138d96c87fdb18 md5: ac1d30b957982568fb034629771c94f2 @@ -7261,17 +7299,17 @@ packages: - pkg:pypi/sortedcontainers?source=hash-mapping size: 28657 timestamp: 1738440459037 -- conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.7-pyhd8ed1ab_0.conda - sha256: 7518506cce9a736042132f307b3f4abce63bf076f5fb07c1f4e506c0b214295a - md5: fb32097c717486aa34b38a9db57eb49e +- conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8-pyhd8ed1ab_0.conda + sha256: c978576cf9366ba576349b93be1cfd9311c00537622a2f9e14ba2b90c97cae9c + md5: 18c019ccf43769d211f2cf78e9ad46c2 depends: - - python >=3.9 + - python >=3.10 license: MIT license_family: MIT purls: - - pkg:pypi/soupsieve?source=hash-mapping - size: 37773 - timestamp: 1746563720271 + - pkg:pypi/soupsieve?source=compressed-mapping + size: 37803 + timestamp: 1756330614547 - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda sha256: 570da295d421661af487f1595045760526964f41471021056e993e73089e9c41 md5: b1b505328da7a6b246787df4b5a49fbc @@ -7416,9 +7454,9 @@ packages: - pkg:pypi/toolz?source=hash-mapping size: 52475 timestamp: 1733736126261 -- conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.2-py312h4c3975b_0.conda - sha256: 891965f8e495ad5cef399db03a13df48df7add06ae131f4b77a88749c74b2060 - md5: 82dacd4832dcde0c2b7888248a3b3d7c +- conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.2-py312h4c3975b_1.conda + sha256: 7cd30a558a00293a33ab9bfe0e174311546f0a1573c9f6908553ecd9a9bc417b + md5: 66b988f7f1dc9fcc9541483cb0ab985b depends: - __glibc >=2.17,<3.0.a0 - libgcc >=14 @@ -7427,12 +7465,12 @@ packages: license: Apache-2.0 license_family: Apache purls: - - 
pkg:pypi/tornado?source=compressed-mapping - size: 850503 - timestamp: 1754732194289 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.2-py312h163523d_0.conda - sha256: 82ceea2527ac484f5c8d7dee95033935b7fecb0b42afb2d9538f7397404aa6d8 - md5: 181a5ca410bad66be792da0e11038016 + - pkg:pypi/tornado?source=hash-mapping + size: 850925 + timestamp: 1756855054247 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.2-py312h163523d_1.conda + sha256: 00e9adcab3564cc579af09c6089c60e5abf5b1fbdca5e4f0fa7299d90f35dc13 + md5: e5f3e0a27abcae26a90645dfff8d68a4 depends: - __osx >=11.0 - python >=3.12,<3.13.0a0 @@ -7442,8 +7480,8 @@ packages: license_family: Apache purls: - pkg:pypi/tornado?source=hash-mapping - size: 853490 - timestamp: 1754732280524 + size: 850838 + timestamp: 1756855106235 - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda sha256: 11e2c85468ae9902d24a27137b6b39b4a78099806e551d390e394a8c34b48e40 md5: 9efbfdc37242619130ea42b1cc4ed861 @@ -7466,18 +7504,19 @@ packages: - pkg:pypi/traitlets?source=hash-mapping size: 110051 timestamp: 1733367480074 -- conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.9-pyhd8ed1ab_0.conda - sha256: 20239fe58a8cce4f5ccb1985bbfacfa630f04e6d6c8c406bbb44d49da5705483 - md5: 14e2a9a27f168d52591755618a0b9c5b +- conda: https://conda.anaconda.org/conda-forge/noarch/treescope-0.1.10-pyhcf101f3_0.conda + sha256: 65e0ae292eded21b0c48645c9ca8db305fbceebc83f0f0cb90d536a3d57c28f2 + md5: 1ffcc20967a4e3bbea41ab3ba1af663f depends: - - numpy >=1.25.2 - python >=3.10 + - numpy >=1.25.2 + - python license: Apache-2.0 license_family: APACHE purls: - pkg:pypi/treescope?source=hash-mapping - size: 121194 - timestamp: 1739897197893 + size: 131315 + timestamp: 1755968402632 - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20250822-pyhd8ed1ab_0.conda sha256: dfdf6e3dea87c873a86cfa47f7cba6ffb500bad576d083b3de6ad1b17e1a59c3 md5: 
5e9220c892fe069da8de2b9c63663319 @@ -7488,16 +7527,16 @@ packages: - pkg:pypi/types-python-dateutil?source=hash-mapping size: 24939 timestamp: 1755865615651 -- conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.14.1-h4440ef1_0.conda - sha256: 349951278fa8d0860ec6b61fcdc1e6f604e6fce74fabf73af2e39a37979d0223 - md5: 75be1a943e0a7f99fcf118309092c635 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda + sha256: 7c2df5721c742c2a47b2c8f960e718c930031663ac1174da67c1ed5999f7938c + md5: edd329d7d3a4ab45dcf905899a7a6115 depends: - - typing_extensions ==4.14.1 pyhe01879c_0 + - typing_extensions ==4.15.0 pyhcf101f3_0 license: PSF-2.0 license_family: PSF purls: [] - size: 90486 - timestamp: 1751643513473 + size: 91383 + timestamp: 1756220668932 - conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda sha256: 4259a7502aea516c762ca8f3b8291b0d4114e094bdb3baae3171ccc0900e722f md5: e0c3cd765dc15751ee2f0b03cd015712 @@ -7510,18 +7549,18 @@ packages: - pkg:pypi/typing-inspection?source=hash-mapping size: 18809 timestamp: 1747870776989 -- conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.14.1-pyhe01879c_0.conda - sha256: 4f52390e331ea8b9019b87effaebc4f80c6466d09f68453f52d5cdc2a3e1194f - md5: e523f4f1e980ed7a4240d7e27e9ec81f +- conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda + sha256: 032271135bca55aeb156cee361c81350c6f3fb203f57d024d7e5a1fc9ef18731 + md5: 0caa1af407ecff61170c9437a808404d depends: - - python >=3.9 + - python >=3.10 - python license: PSF-2.0 license_family: PSF purls: - - pkg:pypi/typing-extensions?source=hash-mapping - size: 51065 - timestamp: 1751643513473 + - pkg:pypi/typing-extensions?source=compressed-mapping + size: 51692 + timestamp: 1756220668932 - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda sha256: 
3088d5d873411a56bf988eee774559335749aed6f6c28e07bf933256afb9eb6c md5: f6d7aa696c67756a650e91e15e88223c @@ -7555,23 +7594,23 @@ packages: - pkg:pypi/uhi?source=hash-mapping size: 35119 timestamp: 1755818259613 -- conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h66e93f0_0.conda - sha256: 638916105a836973593547ba5cf4891d1f2cb82d1cf14354fcef93fd5b941cdc - md5: 617f5d608ff8c28ad546e5d9671cbb95 +- conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h4c3975b_1.conda + sha256: cbf7d13819cf526a094f0cfe2da7f7ba22c4fbae4d231c9004520fbbf93f7027 + md5: 4da303c1e91703d178817252615ca0a7 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 + - libgcc >=14 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: Apache purls: - pkg:pypi/unicodedata2?source=hash-mapping - size: 404401 - timestamp: 1736692621599 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312hea69d52_0.conda - sha256: c6ca9ea11eecc650df4bce4b3daa843821def6d753eeab6d81de35bb43f9d984 - md5: 9a835052506b91ea8f0d8e352cd12246 + size: 404974 + timestamp: 1756494558558 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312h163523d_1.conda + sha256: 6620dfce63e763460d586f8b04c986e2318a58d6be06f30591a0d41902dd39cf + md5: e7e792c655daeca54a98cf44fe0bbb5a depends: - __osx >=11.0 - python >=3.12,<3.13.0a0 @@ -7581,18 +7620,18 @@ packages: license_family: Apache purls: - pkg:pypi/unicodedata2?source=hash-mapping - size: 409745 - timestamp: 1736692768349 -- conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.3-pyhe01879c_0.conda - sha256: 7ef2830f06ecd5a2ba72c3207d66af657df891a8a34c0ad63a975f50bb11ea84 - md5: 99072b2b0e29981272cdf58cb1d6f065 + size: 410699 + timestamp: 1756494753956 +- conda: https://conda.anaconda.org/conda-forge/noarch/uproot-5.6.4-pyhe01879c_0.conda + sha256: 3bb6887bf11540d7b265709d623ec77a51915b8fca0fdd16bf2f01f56e6267de + md5: 
bc45ee2982dea85937f5f54e8b303af0 depends: - python >=3.9 - awkward >=2.4.6 - cramjam >=2.5.0 - python-xxhash - numpy - - fsspec + - fsspec !=2025.7.0 - packaging - typing_extensions >=4.1.0 - python @@ -7600,8 +7639,8 @@ packages: license_family: BSD purls: - pkg:pypi/uproot?source=hash-mapping - size: 271170 - timestamp: 1751970080607 + size: 271997 + timestamp: 1754856697980 - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda sha256: e0eb6c8daf892b3056f08416a96d68b0a358b7c46b99c8a50481b22631a4dfc0 md5: e7cb0f5745e4c5035a460248334af7eb @@ -7890,33 +7929,34 @@ packages: - pkg:pypi/yarl?source=hash-mapping size: 144423 timestamp: 1749555083669 -- conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda - sha256: a4dc72c96848f764bb5a5176aa93dd1e9b9e52804137b99daeebba277b31ea10 - md5: 3947a35e916fcc6b9825449affbf4214 +- conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3989a48_8.conda + sha256: 7e6c633537f4d80a57ad959eeddeef28ff731c96712e151b0160cff04beec72f + md5: f181964ddc6cf678a478e782043598c2 depends: + - libgcc >=14 + - libstdcxx >=14 + - libgcc >=14 - __glibc >=2.17,<3.0.a0 - krb5 >=1.21.3,<1.22.0a0 - - libgcc >=13 - libsodium >=1.0.20,<1.0.21.0a0 - - libstdcxx >=13 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 335400 - timestamp: 1731585026517 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-hc1bb282_7.conda - sha256: 9e585569fe2e7d3bea71972cd4b9f06b1a7ab8fa7c5139f92a31cbceecf25a8a - md5: f7e6b65943cb73bce0143737fded08f1 + size: 309341 + timestamp: 1757042827773 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-hc0cf3cd_8.conda + sha256: f6472d0e300256702a5911de5e83e0f714ccf449c63b7d11a6de2fff7496ab8e + md5: 51521e723a51751a032f8f3797101093 depends: - __osx >=11.0 + - libcxx >=19 - krb5 >=1.21.3,<1.22.0a0 - - libcxx >=18 - libsodium >=1.0.20,<1.0.21.0a0 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 
281565 - timestamp: 1731585108039 + size: 243880 + timestamp: 1757042936223 - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda sha256: 5488542dceeb9f2874e726646548ecc5608060934d6f9ceaa7c6a48c61f9cc8d md5: e52c2ef711ccf31bb7f70ca87d144b9e @@ -7962,36 +8002,40 @@ packages: purls: [] size: 77606 timestamp: 1727963209370 -- conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312h66e93f0_2.conda - sha256: ff62d2e1ed98a3ec18de7e5cf26c0634fd338cb87304cf03ad8cbafe6fe674ba - md5: 630db208bc7bbb96725ce9832c7423bb +- conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.24.0-py312h3fa7853_1.conda + sha256: 0c9a5cd2a38361af58d29351dcaa9b16f45784b885562875ed96be315d025439 + md5: e14ae4525748c648ba9cc6b6116349b6 depends: - __glibc >=2.17,<3.0.a0 - cffi >=1.11 - - libgcc >=13 + - libgcc >=14 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - zstd >=1.5.7,<1.5.8.0a0 + - zstd >=1.5.7,<1.6.0a0 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/zstandard?source=hash-mapping - size: 732224 - timestamp: 1745869780524 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.23.0-py312hea69d52_2.conda - sha256: c499a2639c2981ac2fd33bae2d86c15d896bc7524f1c5651a7d3b088263f7810 - md5: ba0eb639914e4033e090b46f53bec31c + - pkg:pypi/zstandard?source=compressed-mapping + size: 424205 + timestamp: 1756841111538 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.24.0-py312h26de6b3_1.conda + sha256: d84e71855f8f0168e0e6f2b9c8eefc36d4df5b95f6f3e85ae85bd3c3e5132fc1 + md5: 1c7500a891878a61a136605b711af46b depends: - __osx >=11.0 - cffi >=1.11 - python >=3.12,<3.13.0a0 - python >=3.12,<3.13.0a0 *_cpython - python_abi 3.12.* *_cp312 + - zstd >=1.5.7,<1.5.8.0a0 + - zstd >=1.5.7,<1.6.0a0 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/zstandard?source=hash-mapping - size: 532173 - timestamp: 1745870087418 + - pkg:pypi/zstandard?source=compressed-mapping + size: 345168 + 
timestamp: 1756841514802 - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb8e6e7a_2.conda sha256: a4166e3d8ff4e35932510aaff7aa90772f84b4d07e9f6f83c614cba7ceefe0eb md5: 6432cb5d4ac0046c3ac0a8a0f95842f9 diff --git a/pixi.toml b/pixi.toml index 9ad6df5..44e68a2 100644 --- a/pixi.toml +++ b/pixi.toml @@ -11,7 +11,7 @@ version = "0.1.0" awkward = "==2.8.2" cabinetry = ">=0.6.0,<0.7" cloudpickle = ">=3.1.1,<4" -coffea = ">=2024,<2025" +coffea = ">=2025.7.3,<2026" correctionlib = ">=2.7.0,<3" dask-awkward = ">=2025.5.0,<2026" equinox = ">=0.13.0,<0.14" @@ -36,6 +36,8 @@ python = ">=3.12.11,<3.13" xrootd = ">=5.8.1,<6" optimistix = ">=0.0.10" ipython = ">=9.4.0,<10" +fsspec-xrootd = ">=0.4.0" +rich = ">=14.1.0,<15" [pypi-dependencies] relaxed = { git = "https://github.com/pfackeldey/relaxed.git", branch = "fixes_for_zprime" } @@ -53,4 +55,4 @@ description = "Get started with the repository" depends-on = ["lab"] [environments] -lab = ["lab"] +lab = ["lab"] \ No newline at end of file diff --git a/user/configuration.py b/user/configuration.py index 337365d..1b50926 100644 --- a/user/configuration.py +++ b/user/configuration.py @@ -9,6 +9,7 @@ ) from user.observables import get_mtt, get_mva_vars from user.systematics import jet_pt_resolution, jet_pt_scale +from user.skim import dataset_manager_config, skimming_config # ============================================================================== @@ -36,23 +37,23 @@ # ============================================================================== general_config = { - "lumi": 16400, - "weights_branch": "genWeight", - "max_files": -1, - "analysis": "diff", - "run_preprocessing": False, - "run_histogramming": False, - "run_statistics": False, - "run_systematics": False, - "run_plots_only": False, - "run_mva_training": True, - "read_from_cache": True, - "output_dir": "outputs/traced_zprime_with_jax_nn/", - "preprocessed_dir": "./preproc_uproot/z-prime-ttbar-data/", - "processor": "uproot", - "lumifile": 
"./corrections/Cert_271036-284044_13TeV_Legacy2016_"\ - "Collisions16_JSON.txt", - "cache_dir": "/tmp/gradients_analysis/", + "lumi": 16400, + "weights_branch": "genWeight", + "max_files": -1, + "analysis": "diff", + "run_skimming": False, + "run_histogramming": False, + "run_statistics": False, + "run_systematics": False, + "run_plots_only": False, + "run_mva_training": True, + "run_metadata_generation": True, + "read_from_cache": True, + "output_dir": "outputs/test_metadata/", + "processor": "uproot", + "lumifile": "./corrections/Cert_271036-284044_13TeV_Legacy2016_"\ + "Collisions16_JSON.txt", + "cache_dir": "/tmp/gradients_analysis/", } # ============================================================================== @@ -60,20 +61,21 @@ # ============================================================================== preprocess_config = { - "branches": { - "Muon": ["pt", "eta", "phi", "mass", "miniIsoId", "tightId", "charge"], - "FatJet": ["particleNet_TvsQCD", "pt", "eta", "phi", "mass"], - "Jet": ["btagDeepB", "jetId", "pt", "eta", "phi", "mass"], - "PuppiMET": ["pt", "phi"], - "HLT": ["TkMu50"], - "Pileup": ["nTrueInt"], - "event": ["genWeight", "run", "luminosityBlock"], - }, - "ignore_missing": False, # is this implemented? - "mc_branches": { - "event": ["genWeight", "luminosityBlock"], - "Pileup": ["nTrueInt"], - }, + "branches": { + "Muon": ["pt", "eta", "phi", "mass", "miniIsoId", "tightId", "charge"], + "FatJet": ["particleNet_TvsQCD", "pt", "eta", "phi", "mass"], + "Jet": ["btagDeepB", "jetId", "pt", "eta", "phi", "mass"], + "PuppiMET": ["pt", "phi"], + "HLT": ["TkMu50"], + "Pileup": ["nTrueInt"], + "event": ["genWeight", "run", "luminosityBlock", "event"], + }, + "ignore_missing": False, # is this implemented? 
+ "mc_branches": { + "event": ["genWeight"], + "Pileup": ["nTrueInt"], + }, + "skimming": skimming_config, } # ============================================================================== @@ -249,7 +251,7 @@ { "name": "wjets_vs_ttbar_nn", "use_in_diff": True, - "epochs": 1000, + "epochs": 500, "framework": "jax", # keras/tf/... if TF need more info # (e.g. Model: Sequential layers: Dense) "validation_split": 0.2, @@ -439,4 +441,5 @@ "systematics": systematics_config, "statistics": statistics_config, "plotting": plotting_config, + "datasets": dataset_manager_config, } diff --git a/user/skim.py b/user/skim.py new file mode 100644 index 0000000..a88ec86 --- /dev/null +++ b/user/skim.py @@ -0,0 +1,120 @@ +""" +Skimming configuration and selection functions for the Z-prime ttbar analysis. + +This module contains all skimming-related configuration including: +- Dataset definitions with cross-sections and paths +- Skimming selection functions +- Skimming configuration parameters +""" + +import awkward as ak +from coffea.analysis_tools import PackedSelection + + +# ============================================================================== +# Dataset Configuration +# ============================================================================== + +datasets_config = [ + { + "name": "signal", + "directory": "datasets/signal/m2000_w20/", + "cross_section": 1.0, + "file_pattern": "*.txt", + "tree_name": "Events", + "weight_branch": "genWeight" + }, + { + "name": "ttbar_semilep", + "directory": "datasets/ttbar_semilep/", + "cross_section": 831.76 * 0.438, # 364.35 + "file_pattern": "*.txt", + "tree_name": "Events", + "weight_branch": "genWeight" + }, + { + "name": "ttbar_had", + "directory": "datasets/ttbar_had/", + "cross_section": 831.76 * 0.457, # 380.11 + "file_pattern": "*.txt", + "tree_name": "Events", + "weight_branch": "genWeight" + }, + { + "name": "ttbar_lep", + "directory": "datasets/ttbar_lep/", + "cross_section": 831.76 * 0.105, # 87.33 + "file_pattern": 
"*.txt", + "tree_name": "Events", + "weight_branch": "genWeight" + }, + { + "name": "wjets", + "directory": "datasets/wjets/", + "cross_section": 61526.7, + "file_pattern": "*.txt", + "tree_name": "Events", + "weight_branch": "genWeight" + }, + { + "name": "data", + "directory": "datasets/data/", + "cross_section": 1.0, + "file_pattern": "*.txt", + "tree_name": "Events", + "weight_branch": "genWeight" + } +] + +# ============================================================================== +# Dataset Manager Configuration +# ============================================================================== + +dataset_manager_config = { + "datasets": datasets_config, + "metadata_output_dir": "outputs/test_metadata/skimmed/nanoaods_jsons/", + "max_files": None # No limit by default +} + +# ============================================================================== +# Skimming Configuration +# ============================================================================== + + +def default_skim_selection(muons, puppimet, hlt): + """ + Default skimming selection function. + + Applies basic trigger, muon, and MET requirements for skimming. + This matches the hardcoded behavior from the original preprocessing. 
+ """ + + selection = PackedSelection() + + # Muon selection (matching hardcoded behavior) + mu_sel = ( + (muons.pt > 55) + & (abs(muons.eta) < 2.4) + & muons.tightId + & (muons.miniIsoId > 1) + ) + muon_count = ak.sum(mu_sel, axis=1) + + # Individual cuts + selection.add("trigger", hlt.TkMu50) + #selection.add("exactly_1_good_muon", muon_count == 1) + selection.add("met_cut", puppimet.pt > 50) + + # Combined skimming selection + selection.add("skim", selection.all("trigger", "met_cut")) + + return selection + + +skimming_config = { + "selection_function": default_skim_selection, + "selection_use": [("Muon", None), ("PuppiMET", None), ("HLT", None)], + "output_dir": "skimmed_test/", + "chunk_size": 100_000, + "tree_name": "Events", +} diff --git a/utils/__init__.py b/utils/__init__.py index f40413c..5298fce 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,19 +1,15 @@ -from . import build_fileset_json as build_fileset_json -from . import input_files as input_files +from . import metadata_extractor as metadata_extractor from . import jax_stats as jax_stats from . import output_files as output_files -from . import preproc as preproc from . import schema as schema __all__ = [ "build_fileset_json", "configuration", "cuts", - "input_files", "jax_stats", "observables", "output_files", - "preproc", "schema", "systematics", ] diff --git a/utils/build_fileset_json.py b/utils/build_fileset_json.py deleted file mode 100644 index be3b7d6..0000000 --- a/utils/build_fileset_json.py +++ /dev/null @@ -1,265 +0,0 @@ -""" -Utility module to generate JSON metadata for NanoAOD ROOT datasets -used in ZprimeTtbar analysis. - -This script provides: - - `get_root_file_paths`: Read .txt listings to gather ROOT file paths. - - `count_events_in_files`: Query each ROOT file for total and weighted event counts. - - `NanoAODMetadataGenerator`: Class-based API to build and export metadata. - - CLI entrypoint for standalone execution. 
- -Original inspiration from: -https://github.com/iris-hep/analysis-grand-challenge/blob/main/datasets/ -cms-open-data-2015/build_ntuple_json.py -""" - -import json -import logging -import time -from collections import defaultdict -from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union - -import awkward as ak -import uproot - - -# Configure module-level logger -logger = logging.getLogger(__name__) -logger.addHandler(logging.NullHandler()) - - -# Default directory mapping for each physics process -DEFAULT_DATASET_DIRECTORIES: Dict[str, Path] = { - "signal": Path("datasets/signal/m400_w40/"), - "ttbar_semilep": Path("datasets/ttbar_semilep/"), - "ttbar_had": Path("datasets/ttbar_had/"), - "ttbar_lep": Path("datasets/ttbar_lep/"), - "wjets": Path("datasets/wjets/"), - "data": Path("datasets/data/"), -} - - -def get_root_file_paths( - directory: Union[str, Path], - identifiers: Optional[Union[int, List[int]]] = None, -) -> List[Path]: - """ - Collect ROOT file paths from text listings in a directory. - - Searches for `*.txt` files in the specified directory (or specific - `.txt` files if `identifiers` is given) and reads each line as a - ROOT file path. - - Parameters - ---------- - directory : str or Path - Path to the folder containing text listing files. - identifiers : int or list of ints, optional - Specific listing file IDs (without `.txt`) to process. If `None`, all - `.txt` files in the folder are used. - - Returns - ------- - List[Path] - Resolved list of ROOT file paths. - - Raises - ------ - FileNotFoundError - If no listing files are found or any specified file is missing. 
- """ - dir_path = Path(directory) - # Determine which text files to parse - if identifiers is None: - listing_files = list(dir_path.glob("*.txt")) - else: - ids = [identifiers] if isinstance(identifiers, int) else identifiers - listing_files = [dir_path / f"{i}.txt" for i in ids] - - if not listing_files: - raise FileNotFoundError(f"No listing files found in {dir_path}") - - root_paths: List[Path] = [] - for txt_file in listing_files: - if not txt_file.is_file(): - raise FileNotFoundError(f"Missing listing file: {txt_file}") - # Read all non-empty lines as file paths - for line in txt_file.read_text().splitlines(): - path_str = line.strip() - if path_str: - root_paths.append(Path(path_str)) - - return root_paths - - -def count_events_in_files(files: List[Path]) -> Tuple[List[int], List[float]]: - """ - Query ROOT files for event counts and sum of generator weights. - - Opens each file with uproot, reads the "Events" TTree, - and accumulates the number of entries and sum of "genWeight". - - Parameters - ---------- - files : list of Path - Paths to ROOT files to inspect. - - Returns - ------- - num_entries : list of int - Number of events in each file's "Events" tree. - sum_weights : list of float - Total of `genWeight` values per file. 
- """ - num_entries: List[int] = [] - sum_weights: List[float] = [] - start_time = time.time() - - for idx, file_path in enumerate(files): - # Log progress every 10 files and at completion - if idx % 10 == 0 or idx == len(files) - 1: - elapsed = int(time.time() - start_time) - logger.info( - f"Reading file {idx+1}/{len(files)} ({elapsed}s elapsed)" - ) - try: - with uproot.open(file_path) as root_file: - tree = root_file["Events"] - num_entries.append(tree.num_entries) - weights = tree["genWeight"].array(library="ak") - sum_weights.append(float(ak.sum(weights))) - except Exception as err: - logger.warning(f"Error reading {file_path}: {err}") - num_entries.append(0) - sum_weights.append(0.0) - - return num_entries, sum_weights - - -class NanoAODMetadataGenerator: - """ - Class-based API to build and export metadata for NanoAOD datasets. - - Attributes - ---------- - process_directories : Dict[str, Path] - Map from process name to directory of `.txt` listings. - output_directory : Path - Directory where individual JSON files and master index will be written. - - Methods - ------- - get_metadata(identifiers=None) - Build metadata dict without writing to disk. - run(identifiers=None) - Generate metadata dict and write JSON files. - """ - - def __init__( - self, - process_directories: Optional[Dict[str, Union[str, Path]]] = None, - output_directory: Union[ - str, Path - ] = "datasets/nanoaods_jsons_per_process", - ): - # Initialize mapping from process to directory path - raw_map = process_directories or DEFAULT_DATASET_DIRECTORIES - self.process_directories: Dict[str, Path] = { - name: Path(path) for name, path in raw_map.items() - } - # Ensure output directory exists - self.output_directory = Path(output_directory) - self.output_directory.mkdir(parents=True, exist_ok=True) - - def get_metadata( - self, identifiers: Optional[Union[int, List[int]]] = None - ) -> Dict[str, Dict[str, Any]]: - """ - Assemble metadata for each process/variation without file I/O. 
- - Parameters - ---------- - identifiers : int or list of ints, optional - Specific listing IDs to process. If None, all listings are used. - - Returns - ------- - metadata : dict - Nested structure: metadata[process]["nominal"] = { - "files": [ {"path": str, "nevts": int, "nevts_wt": float}, ... ], - "nevts_total": int, - "nevts_wt_total": float - } - """ - results: Dict[str, Dict[str, Any]] = defaultdict(dict) - - for process_name, listing_dir in self.process_directories.items(): - logger.info(f"Processing process: {process_name}") - try: - file_paths = get_root_file_paths(listing_dir, identifiers) - except FileNotFoundError as fnf: - logger.error(fnf) - continue - - entries_count, weight_sums = count_events_in_files(file_paths) - variation_label = "nominal" - file_records = [ - {"path": str(fp), "nevts": cnt, "nevts_wt": wt} - for fp, cnt, wt in zip(file_paths, entries_count, weight_sums) - ] - - results[process_name][variation_label] = { - "files": file_records, - "nevts_total": sum(entries_count), - "nevts_wt_total": sum(weight_sums), - } - - return results - - def run(self, identifiers: Optional[Union[int, List[int]]] = None) -> None: - """ - Generate metadata and write individual JSON files and a master index. - - Parameters - ---------- - identifiers : int or list of ints, optional - Specific listing IDs to process. If None, all listings are used. 
- """ - metadata = self.get_metadata(identifiers) - - # Write per-process JSON files - for process_name, variations in metadata.items(): - for variation_label, data in variations.items(): - output_file = ( - self.output_directory - / f"nanoaods_{process_name}_{variation_label}.json" - ) - with output_file.open("w") as json_f: - json.dump( - {process_name: {variation_label: data}}, - json_f, - indent=4, - ) - logger.debug(f"Wrote file: {output_file}") - - # Write master metadata index - master_file = Path("datasets/nanoaods.json") - master_file.parent.mkdir(parents=True, exist_ok=True) - with master_file.open("w") as mfile: - json.dump(metadata, mfile, indent=4) - logger.info(f"Master metadata written to {master_file}") - - -# CLI entrypoint for standalone usage -def main() -> None: - """ - Command-line interface: instantiate the generator and run. - """ - logging.basicConfig(level=logging.INFO) - generator = NanoAODMetadataGenerator() - generator.run() - - -if __name__ == "__main__": - main() diff --git a/utils/datasets.py b/utils/datasets.py new file mode 100644 index 0000000..f132951 --- /dev/null +++ b/utils/datasets.py @@ -0,0 +1,150 @@ +""" +Centralized dataset management with configurable paths, cross-sections, and metadata. + +This module provides a configurable dataset manager that replaces hardcoded paths +and cross-sections throughout the codebase, making the framework more flexible +and maintainable. +""" + +import json +import logging +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from utils.schema import DatasetConfig, DatasetManagerConfig + +logger = logging.getLogger(__name__) + + +class ConfigurableDatasetManager: + """ + Manages dataset paths, cross-sections, and metadata from configuration. + + This class replaces hardcoded dataset directories and cross-section maps + with a flexible, configuration-driven approach. 
+ """ + + def __init__(self, config: DatasetManagerConfig): + """ + Initialize the dataset manager with configuration. + + Parameters + ---------- + config : DatasetManagerConfig + Configuration containing dataset definitions and paths. + """ + self.config = config + self.datasets = {ds.name: ds for ds in config.datasets} + logger.info(f"Initialized dataset manager with {len(self.datasets)} datasets") + + def get_cross_section(self, process: str) -> float: + """ + Get cross-section from config instead of hardcoded map. + + Parameters + ---------- + process : str + Process name (e.g., 'signal', 'ttbar_semilep', etc.) + + Returns + ------- + float + Cross-section in picobarns + + Raises + ------ + KeyError + If process is not found in configuration + """ + if process not in self.datasets: + raise KeyError(f"Process '{process}' not found in dataset configuration") + return self.datasets[process].cross_section + + def get_dataset_directory(self, process: str) -> Path: + """ + Get dataset directory containing text files with file lists. + + Parameters + ---------- + process : str + Process name + + Returns + ------- + Path + Path to directory containing .txt files with file lists + """ + if process not in self.datasets: + raise KeyError(f"Process '{process}' not found in dataset configuration") + return Path(self.datasets[process].directory) + + def get_tree_name(self, process: str) -> str: + """ + Get ROOT tree name from config. + + Parameters + ---------- + process : str + Process name + + Returns + ------- + str + ROOT tree name + """ + if process not in self.datasets: + raise KeyError(f"Process '{process}' not found in dataset configuration") + return self.datasets[process].tree_name + + def get_cross_section_map(self) -> Dict[str, float]: + """ + Get a dictionary mapping all process names to their cross-sections. + + This provides backward compatibility with code expecting a cross-section map. 
+ + Returns + ------- + dict + Mapping of process names to cross-sections + """ + return {name: ds.cross_section for name, ds in self.datasets.items()} + + def get_dataset_directories_map(self) -> Dict[str, Path]: + """ + Get a dictionary mapping all process names to their directories. + + This provides backward compatibility with code expecting a directory map. + + Returns + ------- + dict + Mapping of process names to directory paths containing .txt files + """ + return {name: Path(ds.directory) for name, ds in self.datasets.items()} + + def list_processes(self) -> List[str]: + """ + Get list of all configured process names. + + Returns + ------- + list + List of process names + """ + return list(self.datasets.keys()) + + def validate_process(self, process: str) -> bool: + """ + Check if a process is configured. + + Parameters + ---------- + process : str + Process name to check + + Returns + ------- + bool + True if process is configured, False otherwise + """ + return process in self.datasets diff --git a/utils/evm_stats.py b/utils/evm_stats.py index 8b0f2b9..7ecc3ed 100644 --- a/utils/evm_stats.py +++ b/utils/evm_stats.py @@ -48,7 +48,7 @@ def update(params: Params, values: PyTree[FScalar]) -> Params: def model_per_channel(params: Params, hists: Hists1D) -> Hists1D: # we put all modifiers into a list, so that we can compose them for application (scaling). # composing is important! it ensures that there's no order dependency in the application of the modifiers, - # and it allows us to apply multiple modifiers at once through batching (vmap) of the same modifier types, + # and it allows us to apply multiple modifiers at once through batching (vmap) of the same modifier types, # which greatly improves performance and reduces compiletime. 
out = {} @@ -133,14 +133,14 @@ def optx_loss(dynamic, args): ) # NLL nll = total_loss(fitresult.value, static, channels) - + # bestfit parameters bestfit_params = evm.tree.combine(fitresult.value, static) # bestfit parameter uncertainties # We use the Cramer-Rao bound to estimate uncertainties # use the bestfit parameters to compute the uncertainties, and split it by value of the parameters - # we explicitly not use `filter=evm.filter.is_not_frozen` here, because we want to compute uncertainties + # we explicitly not use `filter=evm.filter.is_not_frozen` here, because we want to compute uncertainties # for all parameters, not just the "unfrozen" ones dynamic, static = evm.tree.partition(bestfit_params, filter=evm.filter.is_value) bestfit_params_uncertainties = evm.loss.cramer_rao_uncertainty( @@ -365,7 +365,7 @@ def compute_discovery_pvalue( if not channels: logger.error("Discovery calculation aborted: no valid channels") return jnp.array(0.0), {} - + # update the internal evm parameters with the provided values optimized by the analysis # the internal evm_params have much more information that is needed for the fit (like names, constraints, etc), but they are not supposed to be trainable params = update(evm_params, parameters) diff --git a/utils/input_files.py b/utils/input_files.py deleted file mode 100644 index b4bfad6..0000000 --- a/utils/input_files.py +++ /dev/null @@ -1,167 +0,0 @@ -import json -import logging -from pathlib import Path -from typing import Any, Dict, Union - -from tabulate import tabulate - -defaul_dataset_json = Path("datasets/nanoaods.json") - -# Configure module-level logger -logger = logging.getLogger(__name__) - - -def construct_fileset( - max_files_per_sample: int, - preprocessor: str = "uproot", - json_path: Union[str, Path] = defaul_dataset_json, -) -> Dict[str, Dict[str, Any]]: - """ - Build a structured fileset mapping for physics analyses including - file paths and metadata. 
- - This function reads dataset definitions from a JSON file and constructs a nested - dictionary where each key is "__" and values contain: - - files: a mapping of file or glob patterns to ROOT TTrees - - metadata: information on event counts, cross-sections, etc. - - Parameters - ---------- - max_files_per_sample : int - Maximum number of files to include for each sample. Use -1 to include all files. - preprocessor : str, optional - Type of file access to prepare. Supported values: - - "uproot": use glob patterns for directory-level access - - other: list each file individually - Default is "uproot". - json_path : str or Path, optional - Path to the JSON configuration file specifying samples - variations and file lists. - Defaults to 'datasets/nanoaods.json'. - - Returns - ------- - fileset : dict - Nested mapping where each key "__" maps to: - - files (dict): {file_path_or_pattern: "Events"} - - metadata (dict): { - "process": str, - "variation": str, - "nevts": int, - "nevts_wt": float, - "xsec": float, - } - - Raises - ------ - FileNotFoundError - If the JSON configuration file does not exist. - ValueError - If `max_files_per_sample` is less than -1. - JSONDecodeError - If the JSON file cannot be parsed. 
- """ - # Validate inputs - if max_files_per_sample < -1: - raise ValueError( - f"max_files_per_sample must be -1 or non-negative; " - f"got {max_files_per_sample}" - ) - - json_file = Path(json_path) - if not json_file.is_file(): - raise FileNotFoundError(f"Dataset JSON file not found: {json_file}") - - # Load dataset definitions - with json_file.open("r") as f: - dataset_info = json.load(f) - - # Cross-section lookup (in picobarns) - cross_section_map: Dict[str, float] = { - "signal": 1.0, - "ttbar_semilep": 831.76 * 0.438, - "ttbar_had": 831.76 * 0.457, - "ttbar_lep": 831.76 * 0.105, - "wjets": 61526.7, - "data": 1.0, - } - - fileset: Dict[str, Dict[str, Any]] = {} - - # Iterate over each process and its systematic variations - for process_name, variations in dataset_info.items(): - for variation_name, info in variations.items(): - # Extract raw file entries - raw_entries = info.get("files", []) - - # Limit number of files if requested - if max_files_per_sample != -1: - raw_entries = raw_entries[:max_files_per_sample] - - # Compute total event counts - total_events = sum(entry.get("nevts", 0) for entry in raw_entries) - total_weighted = sum( - entry.get("nevts_wt", 0.0) for entry in raw_entries - ) - - # Prepare metadata dict - metadata = { - "process": process_name, - "variation": variation_name, - "nevts": total_events, - "nevts_wt": total_weighted, - "xsec": cross_section_map.get(process_name, 0.0), - } - - # Determine file path patterns or explicit paths - if preprocessor == "uproot": - # Use glob pattern for directory-based access - if process_name == "data": - # CMS public EOS path for collision data - base_pattern = ( - "root://eospublic.cern.ch//eos/opendata/cms/" - "Run2016*/SingleMuon/NANOAOD/" - "UL2016_MiniAODv2_NanoAODv9-v1" - ) - else: - # Deduce directory from first file path - first_path = raw_entries[0].get("path", "") - base_pattern = str(Path(first_path).parents[1]) - - file_map = {f"{base_pattern}/*/*.root": "Events"} - else: - # Explicit 
file listings for other preprocessors - file_map = { - entry.get("path", ""): "Events" for entry in raw_entries - } - - key = f"{process_name}__{variation_name}" - fileset[key] = {"files": file_map, "metadata": metadata} - - logger.debug( - f"Added fileset entry: {key} with {len(file_map)} files" - ) - - logger.info(f"Constructed fileset with {len(fileset)} entries.") - - # --- Add summary table --- - summary_data = [] - headers = ["Key", "Process", "Variation", "# Files"] - for key, content in fileset.items(): - process = content["metadata"]["process"] - variation = content["metadata"]["variation"] - num_files = len(content["files"]) - summary_data.append([key, process, variation, num_files]) - - # Sort by key for consistent output - summary_data.sort(key=lambda x: x[0]) - - logger.info( - "Fileset Summary:\n" - + tabulate(summary_data, headers=headers, tablefmt="grid") - ) - - return fileset - - -# End of module diff --git a/utils/jax_stats.py b/utils/jax_stats.py index 38d5e7f..23ed095 100644 --- a/utils/jax_stats.py +++ b/utils/jax_stats.py @@ -96,7 +96,7 @@ class ChannelData(eqx.Module): - Bin edges are stored for visualization/rebinning purposes """ - name: str = eqx.static_field() # Treated as constant by JAX + name: str = eqx.field(static=True) # Treated as constant by JAX observed_counts: jnp.ndarray templates: Dict[str, jnp.ndarray] bin_edges: jnp.ndarray @@ -497,4 +497,4 @@ def compute_discovery_pvalue( return_mle_pars=True, # Return fitted nuisance parameters test_stat="q0", # Discovery test statistic ) - return p_value, mle_parameters + return p_value, (mle_parameters, {}) diff --git a/utils/logging.py b/utils/logging.py index f2294d2..8229eb2 100644 --- a/utils/logging.py +++ b/utils/logging.py @@ -1,55 +1,106 @@ import logging +from typing import Optional -# ANSI escape codes for colors +from rich.logging import RichHandler +from rich.console import Console +from rich.theme import Theme +from rich.markup import escape + +# ANSI escape codes for 
import logging
from typing import Optional

from rich.logging import RichHandler
from rich.console import Console
from rich.theme import Theme
from rich.markup import escape

# ANSI escape codes for colors (kept for backward compatibility)
BLUE = "\033[0;34m"
RED = "\033[0;31m"
GREEN = "\033[0;32m"
RESET = "\033[0m"


# =============================================================================
# Console Management
# =============================================================================

_console: Optional[Console] = None


def get_console() -> Console:
    """Return the process-wide Rich console, creating it lazily on first use."""
    global _console
    if _console is None:
        # Plain (uncolored) rendering for paths, filenames and log messages.
        plain_theme = Theme(
            {
                "repr.path": "default",
                "repr.filename": "default",
                "log.message": "default",
            }
        )
        _console = Console(theme=plain_theme)
    return _console


# =============================================================================
# Specialized Logging Functions
# =============================================================================

def log_banner(text: str) -> str:
    """
    Return a magenta-colored banner string for use with a logger.

    The banner carries Rich markup that the RichHandler renders when the
    string is logged.

    Parameters
    ----------
    text : str
        The text to display in the banner.

    Returns
    -------
    str
        Formatted banner string with Rich markup.
    """
    headline = text.upper()
    rule = "=" * 80
    # Center using the raw headline length; escape only for rendering so
    # Rich never misreads the text as markup.
    pad = " " * ((80 - len(headline)) // 2)
    banner = f"{rule}\n{pad}{escape(headline)}\n{rule}"
    return f"[magenta]{banner}[/magenta]"


# =============================================================================
# Logger Setup
# =============================================================================

def setup_logging(level: str = "INFO") -> None:
    """
    Attach a project-configured RichHandler to the root logger.

    Markup is enabled so banners and tables render with color; ordinary log
    messages should avoid markup unless intended.

    Parameters
    ----------
    level : str, optional
        The logging level, by default "INFO".
    """
    root_logger = logging.getLogger()

    # Idempotent: a second call must not stack duplicate handlers.
    if root_logger.handlers:
        return

    rich_handler = RichHandler(
        console=get_console(),  # shared console keeps output consistent
        rich_tracebacks=True,
        show_time=True,
        markup=True,  # needed for banners and tables
        log_time_format="%H:%M:%S",
    )
    rich_handler.setFormatter(logging.Formatter("%(message)s"))
    root_logger.addHandler(rich_handler)
    root_logger.setLevel(level)
def _parse_dataset(dataset_key: str) -> Tuple[str, str]:
    """
    Split a dataset key like 'process__variation' into ('process', 'variation').

    If no '__' is present, 'nominal' is used as the variation.
    """
    # partition() mirrors split("__", 1): only the first '__' delimits.
    process, separator, variation = dataset_key.partition("__")
    return (process, variation) if separator else (dataset_key, "nominal")


def get_root_file_paths(
    directory: Union[str, Path],
    identifiers: Optional[Union[int, List[int]]] = None,
) -> List[str]:
    """
    Collect ROOT file paths from `.txt` listing files in a directory.

    Searches for `*.txt` files in the given directory (or the specific
    `.txt` files named by `identifiers`) and reads each non-empty line
    as a ROOT file path.

    Parameters
    ----------
    directory : str or Path
        Path to the folder containing text listing files.
    identifiers : int or list of ints, optional
        Specific listing file IDs (without `.txt`) to process. If `None`,
        all `.txt` files in the folder are used.

    Returns
    -------
    List[str]
        ROOT file paths as strings, in listing order.

    Raises
    ------
    FileNotFoundError
        If no listing files are found or a specified file is missing.
    """
    folder = Path(directory)

    if identifiers is None:
        # No explicit IDs: pick up every listing file in the folder.
        listings = list(folder.glob("*.txt"))
    else:
        wanted = [identifiers] if isinstance(identifiers, int) else identifiers
        listings = [folder / f"{ident}.txt" for ident in wanted]

    if not listings:
        raise FileNotFoundError(f"No listing files found in {folder}")

    collected: List[str] = []
    for listing in listings:
        if not listing.is_file():
            raise FileNotFoundError(f"Missing listing file: {listing}")
        # Keep only non-empty lines, stripped of surrounding whitespace.
        stripped = (raw.strip() for raw in listing.read_text().splitlines())
        collected.extend(entry for entry in stripped if entry)

    return collected
class FilesetBuilder:
    """
    Builds and saves a coffea-compatible fileset from dataset configurations.

    This class reads dataset listings and constructs a fileset dictionary
    suitable for `coffea` processors.

    Attributes
    ----------
    dataset_manager : ConfigurableDatasetManager
        Manages dataset configurations, including paths and tree names.
    """

    def __init__(self, dataset_manager: "ConfigurableDatasetManager"):
        """
        Initializes the FilesetBuilder.

        Parameters
        ----------
        dataset_manager : ConfigurableDatasetManager
            A dataset manager instance (required).
        """
        self.dataset_manager = dataset_manager

    def build_fileset(
        self, identifiers: Optional[Union[int, List[int]]] = None
    ) -> Dict[str, Dict[str, Any]]:
        """
        Builds a coffea-compatible fileset mapping.

        Iterates through configured processes, collects ROOT file paths and
        constructs a dictionary where keys are dataset names
        (process__variation) and values contain a mapping of file paths to
        tree names plus per-dataset metadata.

        Parameters
        ----------
        identifiers : int or list of ints, optional
            Specific listing file IDs (without `.txt`) to process. If `None`,
            all `.txt` files in each process's listing directory are used.

        Returns
        -------
        Dict[str, Dict[str, Any]]
            The constructed fileset in the format:
            `{dataset_name: {"files": {file_path: tree_name}, "metadata": {...}}}`

        Raises
        ------
        ValueError
            If the configured `max_files` is zero or negative.
        """
        fileset: Dict[str, Dict[str, Any]] = {}

        max_files = self.dataset_manager.config.max_files

        # BUGFIX: the previous truthiness check (`if max_files and ...`)
        # let max_files == 0 bypass validation and silently build empty
        # filesets; compare against None explicitly instead.
        if max_files is not None and max_files <= 0:
            raise ValueError("max_files must be None or a positive integer.")

        # Iterate over each process configured in the dataset manager
        for process_name in self.dataset_manager.list_processes():
            logger.info(f"Building fileset for process: {process_name}")

            # Directory of listing files and ROOT tree name for this process
            listing_dir = self.dataset_manager.get_dataset_directory(process_name)
            tree_name = self.dataset_manager.get_tree_name(process_name)

            try:
                # `[:None]` is a no-op slice, so there is no limit when
                # max_files is None.
                file_paths = get_root_file_paths(listing_dir, identifiers)[:max_files]

                # Assume a "nominal" variation; data carries no variation
                # suffix in its dataset key.
                variation_label = "nominal"
                if process_name != "data":
                    dataset_key = f"{process_name}__{variation_label}"
                else:
                    dataset_key = process_name

                # Map each file path to its tree name, plus metadata.
                fileset[dataset_key] = {
                    "files": {file_path: tree_name for file_path in file_paths},
                    "metadata": {
                        "process": process_name,
                        "variation": variation_label,
                        "xsec": self.dataset_manager.get_cross_section(process_name),
                    },
                }

                logger.debug(f"Added {len(file_paths)} files for {dataset_key}")

            except FileNotFoundError as fnf:
                # Missing listings are non-fatal: log and continue with the
                # next process.
                logger.error(f"Could not build fileset for {process_name}: {fnf}")
                continue

        return fileset

    def save_fileset(self, fileset: Dict[str, Dict[str, Any]]) -> None:
        """
        Saves the built fileset to a JSON file.

        The output path is `<metadata_output_dir>/fileset.json`, where the
        directory comes from the dataset manager's configuration.

        Parameters
        ----------
        fileset : Dict[str, Dict[str, Any]]
            The fileset mapping to save.
        """
        # One mkdir of the target directory is sufficient (the previous
        # extra `fileset_path.parent.mkdir(...)` call was redundant).
        output_dir = Path(self.dataset_manager.config.metadata_output_dir)
        output_dir.mkdir(parents=True, exist_ok=True)
        fileset_path = output_dir / "fileset.json"

        # Write the fileset to the JSON file with pretty-printing
        with fileset_path.open("w") as f:
            json.dump(fileset, f, indent=4)

        logger.info(f"Fileset JSON saved to {fileset_path}")
+ self.runner = processor.Runner( + executor=processor.IterativeExecutor(), + schema=NanoAODSchema, + savemetrics=True, + # Use a small chunksize for demonstration/testing to simulate multiple chunks + chunksize=100_000, + ) + + def extract_metadata( + self, fileset: Dict[str, Dict[str, str]] + ) -> List[WorkItem]: + """ + Extracts metadata from the given fileset using coffea.preprocess. + + Parameters + ---------- + fileset : Dict[str, Dict[str, str]] + A coffea-compatible fileset mapping dataset names to file paths and tree names. + + Returns + ------- + List[WorkItem] + A list of `coffea.processor.WorkItem` objects with extracted metadata. + """ + logger.info("Extracting metadata using coffea.dataset_tools.preprocess") + try: + # Run the coffea preprocess function on the provided fileset + workitems = self.runner.preprocess(fileset) + # Convert the generator returned by preprocess to a list of WorkItems + return list(workitems) + except Exception as e: + # Log any errors encountered during preprocessing + logger.error(f"Error during coffea preprocessing: {e}") + # Return an empty list to indicate failure or no metadata extracted + return [] + + +class NanoAODMetadataGenerator: + """ + Orchestrates the generation, reading, and summarization of NanoAOD metadata. + + This class combines `FilesetBuilder` and `CoffeaMetadataExtractor` to provide + a complete metadata management workflow. It can either generate new metadata + or read existing metadata from disk, storing the results as instance + attributes for easy access. + + Attributes + ---------- + dataset_manager : ConfigurableDatasetManager + Manages dataset configurations and output directories. + output_directory : Path + The base directory for all metadata JSON files. + fileset : Optional[Dict[str, Dict[str, Any]]] + The generated or read coffea-compatible fileset. + workitems : Optional[List[WorkItem]] + The generated or read list of `WorkItem` objects. 
+ nanoaods_summary : Optional[Dict[str, Dict[str, Any]]] + The generated or read summarized NanoAOD metadata. + """ + + def __init__( + self, + dataset_manager: ConfigurableDatasetManager + ): + """ + Initializes the NanoAODMetadataGenerator. + + Parameters + ---------- + dataset_manager : ConfigurableDatasetManager + A dataset manager instance (required). + """ + self.dataset_manager = dataset_manager + + # The metadata_output_dir from the config is the canonical source. + # This directory is used for all metadata reading and writing. + self.output_directory = Path(self.dataset_manager.config.metadata_output_dir) + self.output_directory.mkdir(parents=True, exist_ok=True) + + # Initialize modularized components for fileset building and metadata extraction + self.fileset_builder = FilesetBuilder(self.dataset_manager) + self.metadata_extractor = CoffeaMetadataExtractor() + + # Attributes to store generated/read metadata. + # These will be populated by the run() method. + self.fileset: Optional[Dict[str, Dict[str, Any]]] = None + self.workitems: Optional[List[WorkItem]] = None + self.nanoaods_summary: Optional[Dict[str, Dict[str, Any]]] = None + + def _get_metadata_paths(self) -> Dict[str, Path]: + """ + Generates and returns the full paths for all metadata JSON files. + + These paths are consistently derived from the `self.output_directory` + attribute, which is set from `dataset_manager.config.metadata_output_dir` + during initialization. This ensures all read/write operations target the + same locations. + + Returns + ------- + Dict[str, Path] + A dictionary containing the paths for: + - 'fileset_path': Path to the fileset JSON (e.g., fileset.json). + - 'workitems_path': Path to the WorkItems JSON (e.g., workitems.json). + - 'nanoaods_summary_path': Path to the main NanoAODs summary JSON (e.g., nanoaods.json). + - 'process_summary_dir': Path to the directory where per-process JSONs are saved. + """ + # Get the base output directory from the instance attribute. 
+ # This directory is created during __init__. + output_dir = self.output_directory + + # Construct and return the full paths for each metadata file + return { + "fileset_path": output_dir / "fileset.json", + "workitems_path": output_dir / "workitems.json", + "nanoaods_summary_path": output_dir / "nanoaods.json", + "process_summary_dir": output_dir, # Per-process files are saved directly in this directory + } + + def run( + self, + identifiers: Optional[Union[int, List[int]]] = None, + generate_metadata: bool = True + ) -> None: + """ + Generates or reads all metadata. + + This is the main orchestration method. If `generate_metadata` is True, it + performs a full generation workflow. Otherwise, it attempts to read + existing metadata from the expected paths. + + Parameters + ---------- + identifiers : Optional[Union[int, List[int]]], optional + Specific listing file IDs to process. Only used if `generate_metadata` is True. + generate_metadata : bool, optional + If True, generate new metadata. If False, read existing metadata. + Defaults to True. + + Raises + ------ + SystemExit + If `generate_metadata` is False and any required metadata file is not found. 
+ """ + if generate_metadata: + logger.info("Starting metadata generation workflow...") + # Step 1: Build and save the fileset + self.fileset = self.fileset_builder.build_fileset(identifiers) + self.fileset_builder.save_fileset(self.fileset) + + # Step 2: Extract and save WorkItem metadata + self.workitems = self.metadata_extractor.extract_metadata(self.fileset) + self.write_metadata() + + # Step 3: Summarize and save NanoAODs metadata + self.summarise_nanoaods() + self.write_nanoaods_summary() + logger.info("Metadata generation complete.") + else: + logger.info(f"Skipping metadata generation - using existing metadata from \n %s", + pretty_repr(self._get_metadata_paths())) + try: + self.read_fileset() + self.read_metadata() + self.read_nanoaods_summary() + logger.info("All metadata successfully loaded from disk.") + except (FileNotFoundError, json.JSONDecodeError, KeyError) as e: + logger.error(f"Failed to load existing metadata: {e}") + logger.error("Please ensure metadata files exist or enable generation.") + sys.exit(1) + + + def write_nanoaods_summary(self) -> None: + """ + Writes the summarized NanoAOD metadata to JSON files. + + This method writes individual JSON files for each process/variation and a + master `nanoaods.json` file. + + Raises + ------ + ValueError + If `self.nanoaods_summary` has not been populated. + """ + # Check if the summary data is available + if self.nanoaods_summary is None: + raise ValueError("NanoAODs summary is not available to write. 
Please generate or load it first.") + + # Get all necessary output paths from the helper method + paths = self._get_metadata_paths() + process_summary_dir = paths["process_summary_dir"] + nanoaods_summary_path = paths["nanoaods_summary_path"] + + # Write per-process JSON files for detailed breakdown + for process_name, variations in self.nanoaods_summary.items(): + for variation_label, data in variations.items(): + # Construct filename for per-process summary + per_process_summary_path = ( + process_summary_dir + / f"nanoaods_{process_name}_{variation_label}.json" + ) + # Ensure the directory for the output file exists + per_process_summary_path.parent.mkdir(parents=True, exist_ok=True) + + # Write the specific process/variation data to its JSON file + with per_process_summary_path.open("w") as f: + json.dump( + {process_name: {variation_label: data}}, # Wrap in a dict for consistent structure + f, + indent=4, + ) + logger.debug(f"Wrote NanoAODs summary file: {per_process_summary_path}") + + # Write the master metadata index file containing the full aggregated summary + # This file is the primary input for analysis fileset construction + with nanoaods_summary_path.open("w") as f: + json.dump(self.nanoaods_summary, f, indent=4) + logger.info(f"NanoAODs summary written to {nanoaods_summary_path}") + + def summarise_nanoaods(self) -> None: + """ + Summarizes the extracted `WorkItem` metadata into a structured NanoAODs summary. + + This method processes `self.workitems` to aggregate event counts per + file, process, and variation, storing the result in `self.nanoaods_summary` + with the schema: + `{process_name: {variation_label: {"files": [...], "nevts_total": int}}}`. + + Raises + ------ + ValueError + If `self.workitems` has not been populated. + """ + # Ensure sample chunks are available for summarization + if self.workitems is None: + raise ValueError("Sample chunks (WorkItems) are not available to summarize. 
Please extract or load them first.")
+
+        # Use self.workitems directly as the source of WorkItems
+        workitems = self.workitems
+
+        # Initialize a nested defaultdict to store aggregated event counts:
+        # structure: process -> variation -> filename -> event count
+        counts: Dict[str, Dict[str, Dict[str, int]]] = defaultdict(lambda: defaultdict(lambda: defaultdict(int)))
+
+        # Iterate through each WorkItem to extract relevant information
+        for wi in workitems:
+            # Convert WorkItem dataclass to a dictionary for easier access
+            wi_dict = dataclasses.asdict(wi)
+
+            dataset = wi_dict["dataset"]  # type: ignore
+            filename = wi_dict["filename"]
+            # Extract entry start and stop, default to 0 if not present
+            start = int(wi_dict.get("entrystart", 0))
+            stop = int(wi_dict.get("entrystop", 0))
+
+            # Calculate number of events in this chunk, ensuring it's non-negative
+            nevts = max(0, stop - start)
+
+            # Parse the dataset key to get process and variation names
+            proc, var = _parse_dataset(dataset)
+            logger.debug(f"Processing WorkItem: {proc}, {var}, (unknown), {nevts} events")
+
+            # Aggregate event counts for the specific process, variation, and filename
+            counts[proc][var][filename] += nevts
+
+        # Build the final output schema (self.nanoaods_summary)
+        out: Dict[str, Dict[str, Any]] = {}
+        for proc, per_var in counts.items():
+            out[proc] = {}
+            for var, per_file in per_var.items():
+                # Create a list of files with their event counts, sorted by path for reproducibility
+                files_list = [  # type: ignore
+                    {"path": str(path), "nevts": nevts}
+                    for path, nevts in sorted(per_file.items())
+                ]  # type: ignore
+                # Calculate the total number of events for this process and variation
+                nevts_total = sum(f["nevts"] for f in files_list)  # type: ignore
+
+                # Store the aggregated data in the output dictionary
+                out[proc][var] = {
+                    "files": files_list,
+                    "nevts_total": int(nevts_total),  # Ensure total events is an integer
+                }
+        # Assign the generated summary to the instance attribute
+        
self.nanoaods_summary = out
+        logger.info("NanoAODs summary generated.")
+
+    def read_fileset(self) -> None:
+        """
+        Reads the fileset from `fileset.json` and stores it.
+
+        Raises
+        ------
+        FileNotFoundError
+            If the `fileset.json` file does not exist at the expected path.
+        """
+        # Get the canonical path for the fileset JSON file
+        paths = self._get_metadata_paths()
+        fileset_path = paths["fileset_path"]
+
+        logger.info(f"Attempting to read fileset from {fileset_path}")
+        try:
+            # Open and load the JSON file
+            with fileset_path.open("r") as f:
+                # Truncate each dataset's file list only for a positive max_files limit
+                self.fileset = json.load(f)
+                if (max_files := self.dataset_manager.config.max_files) and max_files > 0:
+                    for dataset, data in self.fileset.items():
+                        files = list(data["files"].items())[:max_files]
+                        self.fileset[dataset]["files"] = dict(files)
+
+            logger.info("Fileset successfully loaded.")
+        except FileNotFoundError as e:
+            # Log error and re-raise if file is not found
+            logger.error(f"Fileset JSON not found at {fileset_path}. {e}")
+            raise
+        except json.JSONDecodeError as e:
+            # Log error and re-raise if JSON decoding fails
+            logger.error(f"Error decoding fileset JSON from {fileset_path}. {e}")
+            raise
+        except KeyError as e:
+            # Log error and re-raise if expected keys are missing (less common for fileset)
+            logger.error(f"Missing expected key in fileset JSON from {fileset_path}. {e}")
+            raise
+
+
+
+    def read_metadata(self) -> None:
+        """
+        Reads `WorkItem` metadata from `workitems.json` and stores it.
+
+        This method deserializes `WorkItem` objects, decoding the base64-encoded
+        `fileuuid` field back to its binary format.
+
+        Raises
+        ------
+        FileNotFoundError
+            If the `workitems.json` file does not exist. 
+ """ + # Get the canonical path for the workitems JSON file + paths = self._get_metadata_paths() + workitems_path = paths["workitems_path"] + + # Load JSON data from file + logger.info(f"Attempting to read WorkItems metadata from {workitems_path}") + try: + with workitems_path.open("r") as f: + workitems_data = json.load(f) + except FileNotFoundError as e: + logger.error(f"WorkItems JSON not found at {workitems_path}. {e}") + raise + except json.JSONDecodeError as e: + logger.error(f"Error decoding WorkItems JSON from {workitems_path}. {e}") + raise + + # Reconstruct WorkItem objects from dictionaries + reconstructed_items = [] + for i, item_dict in enumerate(workitems_data): + try: + # Decode base64-encoded file UUID back to binary format + # This reverses the encoding done in write_metadata() + item_dict["fileuuid"] = base64.b64decode(item_dict["fileuuid"]) + + # Reconstruct WorkItem object from dictionary + # WorkItem is a dataclass that represents file metadata in coffea + work_item = WorkItem(**item_dict) + reconstructed_items.append(work_item) + except KeyError as e: + logger.error(f"Missing expected key '{e}' in WorkItem entry {i} from {workitems_path}.") + raise + except Exception as e: + logger.error(f"Error reconstructing WorkItem entry {i} from {workitems_path}: {e}") + raise + + # Assign the reconstructed WorkItems to the instance attribute + self.workitems = reconstructed_items + logger.info("WorkItems metadata successfully loaded.") + + def read_nanoaods_summary(self) -> None: + """ + Reads the NanoAODs summary from `nanoaods.json` and stores it. + + Raises + ------ + FileNotFoundError + If the `nanoaods.json` file does not exist. 
+ """ + # Get the canonical path for the nanoaods summary JSON file + paths = self._get_metadata_paths() + nanoaods_summary_path = paths["nanoaods_summary_path"] + + logger.info(f"Attempting to read NanoAODs summary from {nanoaods_summary_path}") + try: + # Open and load the JSON file + with nanoaods_summary_path.open("r") as f: + self.nanoaods_summary = json.load(f) + logger.info("NanoAODs summary successfully loaded.") + except FileNotFoundError as e: + logger.error(f"NanoAODs summary JSON not found at {nanoaods_summary_path}. {e}") + raise + except json.JSONDecodeError as e: + logger.error(f"Error decoding NanoAODs summary JSON from {nanoaods_summary_path}. {e}") + raise + except KeyError as e: + logger.error(f"Missing expected key in NanoAODs summary JSON from {nanoaods_summary_path}. {e}") + raise + + def write_metadata(self) -> None: + """ + Writes the `WorkItem` metadata to `workitems.json`. + + It serializes the `coffea.processor.WorkItem` objects to a JSON file, + base64-encoding the binary `fileuuid` field for JSON compatibility. + + Raises + ------ + ValueError + If `self.workitems` has not been populated. + """ + # Ensure sample chunks are available for writing + if self.workitems is None: + raise ValueError("Sample chunks (WorkItems) are not available to write. 
Please extract or load them first.") + + # Get the canonical path for the workitems JSON file + paths = self._get_metadata_paths() + workitems_path = paths["workitems_path"] + + # Ensure the parent directory exists + workitems_path.parent.mkdir(parents=True, exist_ok=True) + + # Convert WorkItem objects to serializable dictionaries + serializable = [] + for workitem in self.workitems: + # Convert dataclass to a dictionary + workitem_dict = dataclasses.asdict(workitem) + + # Encode binary file UUID as base64 string for JSON compatibility + # This is necessary because JSON cannot handle raw bytes + workitem_dict["fileuuid"] = base64.b64encode(workitem_dict["fileuuid"]).decode("ascii") + + serializable.append(workitem_dict) # type: ignore + + # Write serialized metadata to JSON file with pretty-printing + with workitems_path.open("w") as f: + json.dump(serializable, f, indent=4) + + logger.info(f"WorkItems metadata saved to {workitems_path}") \ No newline at end of file diff --git a/utils/mva.py b/utils/mva.py index 50ff266..8288d23 100644 --- a/utils/mva.py +++ b/utils/mva.py @@ -830,7 +830,7 @@ def train( parameters, valid_x, valid_y ) msg += f", val_acc={val_acc:.4f}" - print(msg) + logger.info(msg) self.parameters = parameters return self.parameters diff --git a/utils/plot.py b/utils/plot.py index d26b4f9..1191449 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -19,7 +19,7 @@ from matplotlib.gridspec import GridSpec from matplotlib.ticker import ScalarFormatter -from utils.logging import ColoredFormatter +from utils.logging import setup_logging # Configure matplotlib global settings rcParams.update( @@ -34,13 +34,9 @@ # Type alias for array-like objects (supports JAX, NumPy, etc.) 
ArrayLike = Union[np.ndarray, Any] -# Set up module-specific logger with colored formatting +# Set up module-specific logger with setup_logging function +setup_logging("INFO") logger = logging.getLogger(__name__) -if not logger.handlers: - handler = logging.StreamHandler() - handler.setFormatter(ColoredFormatter()) - logger.addHandler(handler) - logger.setLevel(logging.INFO) def format_scientific_latex(value: float, significant_digits: int = 2) -> str: diff --git a/utils/preproc.py b/utils/preproc.py deleted file mode 100644 index fa39d4d..0000000 --- a/utils/preproc.py +++ /dev/null @@ -1,262 +0,0 @@ -import logging - -import awkward as ak -from coffea.nanoevents import NanoAODSchema, NanoEventsFactory -import dask_awkward as dak -import numpy as np -from tqdm import tqdm -import uproot - -logger = logging.getLogger(__name__) - - -def build_branches_to_keep(config, mode="uproot", is_mc=False): - """ - Build list or dict of branches to keep for preprocessing. - - Parameters - ---------- - config : Config - Configuration object with a preprocess block. - mode : str - 'uproot' returns a flat list; 'dask' returns a dict. - is_mc : bool - Whether input files are Monte Carlo. - - Returns - ------- - dict or list - Branches to retain depending on mode. 
- """ - branches = config.preprocess.branches - mc_branches = config.preprocess.mc_branches - filtered = {} - - for obj, obj_branches in branches.items(): - if not is_mc: - filtered[obj] = [ - br for br in obj_branches if br not in mc_branches.get(obj, []) - ] - else: - filtered[obj] = obj_branches - - if mode == "dask": - return filtered - - if mode == "uproot": - flat = [] - for obj, brs in filtered.items(): - flat.extend( - brs if obj == "event" else [f"{obj}_{br}" for br in brs] - ) - return flat - - raise ValueError("Invalid mode: use 'dask' or 'uproot'.") - - -# ----------------------------- -# Preprocessing Logic with dak -# ----------------------------- -def pre_process_dak( - input_path, - tree, - output_dir, - configuration, - step_size=100_000, - is_mc=True, -): - """ - Preprocess input ROOT file by applying basic filtering and reducing branches. - - Parameters - ---------- - input_path : str - Path to the input ROOT file. - tree : str - Name of the TTree inside the file. - output_path : str - Destination directory for filtered output. - step_size : int - Chunk size to load events incrementally. - - Returns - ------- - int - Total number of input events before filtering. 
- """ - with uproot.open(f"{input_path}:{tree}") as f: - total_events = f.num_entries - - logger.info("========================================") - logger.info( - f"📂 Preprocessing file: {input_path} with {total_events:,} events" - ) - - branches = build_branches_to_keep(configuration, mode="dak", is_mc=is_mc) - selected = None - - for start in range(0, total_events, step_size): - stop = min(start + step_size, total_events) - - events = NanoEventsFactory.from_root( - {input_path: tree}, - schemaclass=NanoAODSchema, - entry_start=start, - entry_stop=stop, - delayed=True, - # xrootd_handler= uproot.source.xrootd.MultithreadedXRootDSource, - ).events() - - mu_sel = ( - (events.Muon.pt > 55) - & (abs(events.Muon.eta) < 2.4) - & events.Muon.tightId - & (events.Muon.miniIsoId > 1) - ) - muon_count = ak.sum(mu_sel, axis=1) - mask = ( - events.HLT.TkMu50 & (muon_count == 1) & (events.PuppiMET.pt > 50) - ) - - filtered = events[mask] - - subset = {} - for obj, obj_branches in branches.items(): - if obj == "event": - subset.update( - { - br: filtered[br] - for br in obj_branches - if br in filtered.fields - } - ) - elif obj in filtered.fields: - subset.update( - { - f"{obj}_{br}": filtered[obj][br] - for br in obj_branches - if br in filtered[obj].fields - } - ) - - compact = dak.zip(subset, depth_limit=1) - selected = ( - compact - if selected is None - else ak.concatenate([selected, compact]) - ) - - logger.info(f"💾 Writing skimmed output to: {output_dir}") - uproot.dask_write( - selected, destination=output_dir, compute=True, tree_name=tree - ) - return total_events - - -# ----------------------------- -# Preprocessing Logic with uproot -# ----------------------------- -def pre_process_uproot( - input_path, - tree, - output_path, - configuration, - step_size=100_000, - is_mc=True, -): - """ - Process a ROOT file by applying a selection function on chunks of data - and saving the filtered results to a new file, with a progress bar. 
- - Parameters - ---------- - input_path : str - Path to the input ROOT file. - tree : str - Name of the TTree inside the file. - output_path : str - Path to the output ROOT file. - configuration : object - Configuration object containing branch selection and other settings. - step_size : int - Number of entries to process in each chunk. - is_mc : bool - Flag indicating whether the input data is from MC or not. - Returns - ------- - bool - True if the output file was created successfully, False otherwise. - """ - - cut_str = "HLT_TkMu50*(PuppiMET_pt>50)" - branches = build_branches_to_keep( - configuration, mode="uproot", is_mc=is_mc - ) - - # First, get the total number of entries for the - # progress bar (takes ~3min for 170M events) - total_events = len( - uproot.concatenate( - f"{input_path}:{tree}", ["run"], library="np", how=tuple - )[0] - ) - logger.info("========================================") - logger.info( - f"📂 Preprocessing file: {input_path} with {total_events:,} events" - ) - - iterable = uproot.iterate( - f"{input_path}:{tree}", - branches, - step_size=step_size, - cut=cut_str, - library="ak", - num_workers=1, # for some reason, more workers are slower - ) - - n_chunks = (total_events + step_size - 1) // step_size # Ceiling division - pbar = tqdm(iterable, total=n_chunks, desc="Processing events") - - # Initialize output file and tree - output = None - output_tree = None - branch_types = {} - - for arrays in pbar: - branches = arrays.fields - - # For the first chunk, create the output file - if output is None: - output = uproot.recreate(output_path) - - # Remember the branch structure from the first successful chunk - for branch in arrays.fields: - if isinstance(arrays[branch], ak.Array): - branch_types[branch] = arrays[branch].type - else: - branch_types[branch] = np.dtype(arrays[branch].dtype) - - # Create the output tree with proper types - output_tree = output.mktree(tree, branch_types) - - # Make sure we only write available branches that 
match the output tree - # This handles the case where some branches might be missing in later chunks - available_branches = set(branches) & set(branch_types.keys()) - filtered_data_to_write = { - branch: arrays[branch] for branch in available_branches - } - - # Write the filtered data for available branches only - output_tree.extend(filtered_data_to_write) - - # Close the progress bar - pbar.close() - - # Close the output file if it was created - if output is not None: - output.close() - logger.info(f"💾 Writing skimmed output to: {output_path}") - return True - else: - logger.info(f"💾 No events passed selection for {input_path}") - return False diff --git a/utils/schema.py b/utils/schema.py index 6dd757e..8b5eff2 100644 --- a/utils/schema.py +++ b/utils/schema.py @@ -119,21 +119,10 @@ class GeneralConfig(SubscriptableModel): ] analysis: Annotated[ Optional[str], - Field( - default="nondiff", - description="The analysis mode to run: 'diff' (differentiable) " - "'nondiff' or 'both'.", - ), - ] - max_files: Annotated[ - Optional[int], - Field( - default=1, - description="Maximum number of files to process per dataset. 
" - "Use -1 for no limit.", - ), + Field(default="nondiff", + description="The analysis mode to run: 'diff' (differentiable), 'nondiff', 'both', or 'skip' (skim-only mode)."), ] - run_preprocessing: Annotated[ + run_skimming: Annotated[ bool, Field( default=False, @@ -178,6 +167,13 @@ class GeneralConfig(SubscriptableModel): description="If True, run the MVA model pre-training step.", ), ] + run_metadata_generation: Annotated[ + bool, + Field( + default=True, + description="If True, run the JSON metadata generation step before constructing fileset.", + ), + ] read_from_cache: Annotated[ bool, Field( @@ -203,13 +199,6 @@ class GeneralConfig(SubscriptableModel): "'dask-awkward'.", ), ] - preprocessed_dir: Annotated[ - Optional[str], - Field( - default=None, - description="Directory containing pre-processed (skimmed) ROOT files.", - ), - ] cache_dir: Annotated[ Optional[str], Field( @@ -237,9 +226,9 @@ class GeneralConfig(SubscriptableModel): @model_validator(mode="after") def validate_general(self) -> "GeneralConfig": """Validate the general configuration settings.""" - if self.analysis not in ["diff", "nondiff", "both"]: + if self.analysis not in ["diff", "nondiff", "both", "skip"]: raise ValueError( - f"Invalid analysis mode '{self.analysis}'. Must be 'diff' or 'nondiff'." + f"Invalid analysis mode '{self.analysis}'. Must be 'diff', 'nondiff', 'both', or 'skip'." 
) return self @@ -315,6 +304,77 @@ class JaxConfig(SubscriptableModel): ] +# ------------------------ +# Dataset configuration +# ------------------------ +class DatasetConfig(SubscriptableModel): + """Configuration for individual dataset paths, cross-sections, and metadata""" + name: Annotated[str, Field(description="Dataset name/identifier")] + directory: Annotated[str, Field(description="Directory containing dataset files")] + cross_section: Annotated[float, Field(description="Cross-section in picobarns")] + file_pattern: Annotated[str, Field(default="*.root", description="File pattern for dataset files")] + tree_name: Annotated[str, Field(default="Events", description="ROOT tree name")] + weight_branch: Annotated[str, Field(default="genWeight", description="Branch name for event weights")] + remote_access: Annotated[ + Optional[dict[str, str]], + Field(default=None, description="Configuration for remote access (EOS, XRootD, etc.)") + ] + +class DatasetManagerConfig(SubscriptableModel): + """Top-level dataset management configuration""" + datasets: Annotated[List[DatasetConfig], Field(description="List of dataset configurations")] + metadata_output_dir: Annotated[ + str, + Field(default="datasets/nanoaods_jsons/", description="Directory for metadata JSON files") + ] + max_files: Annotated[ + Optional[int], + Field( + default=None, + description="Maximum number of files to process per dataset. 
" + "Use -1 for no limit.", + ), + ] + +# ------------------------ +# Skimming configuration +# ------------------------ +class SkimmingConfig(SubscriptableModel): + """Configuration for workitem-based skimming selections and output""" + + # Selection function - required for workitem-based skimming + selection_function: Annotated[ + Callable, + Field(description="Selection function that returns a PackedSelection object") + ] + + # Selection inputs - required to specify what the function needs + selection_use: Annotated[ + List[ObjVar], + Field(description="List of (object, variable) tuples specifying inputs for the selection function") + ] + + # Output directory configuration + output_dir: Annotated[ + str, + Field( + description="Base directory for skimmed files. When run_skimming=True, this is where " + "skimmed files will be written. When run_skimming=False, this is where " + "existing skimmed files will be read from. Files follow the fixed structure: " + "{output_dir}/{dataset}/file__{idx}/part_X.root where X is the chunk number." 
+ ) + ] + + # File handling configuration + chunk_size: Annotated[ + int, + Field(default=100_000, description="Number of events to process per chunk (used for configuration compatibility)") + ] + tree_name: Annotated[ + str, + Field(default="Events", description="ROOT tree name for input and output files") + ] + # ------------------------ # Preprocessing configuration # ------------------------ @@ -338,6 +398,12 @@ class PreprocessConfig(SubscriptableModel): ), ] + # Enhanced skimming configuration + skimming: Annotated[ + Optional[SkimmingConfig], + Field(default=None, description="Configuration for skimming selections and output") + ] + @model_validator(mode="after") def validate_branches(self) -> "PreprocessConfig": """Validate the branch configuration for duplicates and consistency.""" @@ -1140,6 +1206,12 @@ class Config(SubscriptableModel): ), ] + # Enhanced dataset management + datasets: Annotated[ + DatasetManagerConfig, + Field(description="Dataset management configuration (required)") + ] + @model_validator(mode="after") def validate_config(self) -> "Config": # Check for duplicate channel names @@ -1161,9 +1233,15 @@ def validate_config(self) -> "Config": "Duplicate systematic names found in configuration." ) - if self.general.run_preprocessing and not self.preprocess: + if self.general.run_skimming and not self.preprocess: + raise ValueError( + "Skimming is enabled but no preprocess configuration provided." + ) + + if self.general.run_skimming and (not self.preprocess.skimming): raise ValueError( - "Preprocessing is enabled but no preprocess configuration provided." + "Skimming is enabled but no skimming configuration provided. " + "Please provide a SkimmingConfig with selection_function, selection_use, and output_dir." 
) if self.statistics is not None: @@ -1220,15 +1298,16 @@ def validate_config(self) -> "Config": seen_objects.add(object_mask.object) # check for duplicate mva parameter names - all_mva_params: List[str] = [] - for net in self.mva: - for layer in net.layers: - all_mva_params += [layer.weights, layer.bias] - duplicates = {p for p in all_mva_params if all_mva_params.count(p) > 1} - if duplicates: - raise ValueError( - f"Duplicate NN parameter names across MVAs: {sorted(duplicates)}" - ) + if self.mva is not None: + all_mva_params: List[str] = [] + for net in self.mva: + for layer in net.layers: + all_mva_params += [layer.weights, layer.bias] + duplicates = {p for p in all_mva_params if all_mva_params.count(p) > 1} + if duplicates: + raise ValueError( + f"Duplicate NN parameter names across MVAs: {sorted(duplicates)}" + ) return self diff --git a/utils/skimming.py b/utils/skimming.py new file mode 100644 index 0000000..9d2b704 --- /dev/null +++ b/utils/skimming.py @@ -0,0 +1,837 @@ +""" +Event skimming and preprocessing module. + +This module provides workitem-based processing of NanoAOD datasets with +automatic failure handling, event merging, and caching. It processes file +chunks (workitems) in parallel using dask.bag, applies configurable event +selections, and merges output files per dataset for efficient analysis. +""" + +import hashlib +import logging +import os +from pathlib import Path +from typing import Any, Dict, List, Optional, Set, Union +from collections import defaultdict + +import awkward as ak +import cloudpickle +import dask.bag +import hist +import uproot +from coffea.nanoevents import NanoAODSchema, NanoEventsFactory +from coffea.processor.executor import WorkItem +from tabulate import tabulate + +from utils.schema import SkimmingConfig +from utils.tools import get_function_arguments + +logger = logging.getLogger(__name__) + + +def default_histogram() -> hist.Hist: + """ + Create a default histogram for tracking processing success/failure. 
+ + This histogram serves as a dummy placeholder to track whether workitems + were processed successfully. The actual analysis histograms are created + separately during the analysis phase. + + Returns + ------- + hist.Hist + A simple histogram with regular binning for tracking purposes + """ + return hist.Hist.new.Regular(10, 0, 1000).Weight() + + +def workitem_analysis( + workitem: WorkItem, + config: SkimmingConfig, + configuration: Any, + file_counters: Dict[str, int], + part_counters: Dict[str, int], + is_mc: bool = True, +) -> Dict[str, Any]: + """ + Process a single workitem for skimming analysis. + + This function handles I/O, applies skimming selections, and saves + output files on successful processing. + + Parameters + ---------- + workitem : WorkItem + The coffea WorkItem containing file metadata and entry ranges + config : SkimmingConfig + Skimming configuration with selection functions and output settings + configuration : Any + Main analysis configuration object containing branch selections + file_counters : Dict[str, int] + Pre-computed mapping of file keys to file numbers + part_counters : Dict[str, int] + Pre-computed mapping of part keys (including entry ranges) to part numbers + is_mc : bool, default True + Whether the workitem represents Monte Carlo data + + Returns + ------- + Dict[str, Any] + Dictionary containing: + - 'hist': Dummy histogram for success tracking + - 'failed_items': Set of failed workitems (empty on success) + - 'processed_events': Number of events processed + - 'output_files': List of created output files + """ + dummy_hist = default_histogram() + + try: + # Extract workitem metadata + filename = workitem.filename + treename = workitem.treename + entry_start = workitem.entrystart + entry_stop = workitem.entrystop + dataset = workitem.dataset + + # Load events using NanoEventsFactory + events = NanoEventsFactory.from_root( + {filename: treename}, + entry_start=entry_start, + entry_stop=entry_stop, + schemaclass=NanoAODSchema, 
+ ).events() + + total_events = len(events) + + # Apply skimming selection using the provided function + selection_func = config.selection_function + selection_use = config.selection_use + + # Get function arguments using existing utility + selection_args = get_function_arguments( + selection_use, events, function_name=selection_func.__name__ + ) + packed_selection = selection_func(*selection_args) + + # Apply final selection mask + selection_names = packed_selection.names + if selection_names: + final_selection = selection_names[-1] + mask = packed_selection.all(final_selection) + else: + # No selection applied, keep all events + mask = slice(None) + + filtered_events = events[mask] + processed_events = len(filtered_events) + + # Fill dummy histogram with some dummy values for tracking + if processed_events > 0: + # Use a simple observable for the dummy histogram + dummy_values = [500.0] * min(processed_events, 100) + dummy_hist.fill(dummy_values) + + output_files = [] + if processed_events > 0: + output_file = _create_output_file_path( + workitem, config, file_counters, part_counters + ) + _save_workitem_output( + filtered_events, output_file, config, configuration, is_mc + ) + output_files.append(str(output_file)) + + return { + "hist": dummy_hist, + "failed_items": set(), + "processed_events": processed_events, + "output_files": output_files, + } + + except Exception as e: + logger.error(f"Failed to process workitem {workitem.filename}: {e}") + return { + "hist": default_histogram(), + "failed_items": {workitem}, # Track failure + "processed_events": 0, + "output_files": [], + } + + +def reduce_results( + result_a: Dict[str, Any], result_b: Dict[str, Any] +) -> Dict[str, Any]: + """ + Combine partial results from workitem processing. + + It combines histograms, failed items, and other metrics from parallel + processing. 
+ + Parameters + ---------- + result_a : Dict[str, Any] + First result dictionary + result_b : Dict[str, Any] + Second result dictionary + + Returns + ------- + Dict[str, Any] + Combined result dictionary + """ + return { + "hist": result_a["hist"] + result_b["hist"], + "failed_items": result_a["failed_items"] | result_b["failed_items"], + "processed_events": result_a["processed_events"] + result_b["processed_events"], + "output_files": result_a["output_files"] + result_b["output_files"], + } + + +def _create_output_file_path( + workitem: WorkItem, + config: SkimmingConfig, + file_counters: Dict[str, int], + part_counters: Dict[str, int], +) -> Path: + """ + Create output file path following the existing pattern with entry-range-based + counters. + + Uses the same output structure as the current skimming code: + {output_dir}/{dataset}/file__{file_idx}/part_{chunk}.root + + Parameters + ---------- + workitem : WorkItem + The workitem being processed + config : SkimmingConfig + Skimming configuration with output directory + file_counters : Dict[str, int] + Pre-computed mapping of file keys to file numbers + part_counters : Dict[str, int] + Pre-computed mapping of part keys (including entry ranges) to part numbers + + Returns + ------- + Path + Full path to the output file + """ + dataset = workitem.dataset + + # Create keys that include entry ranges for proper differentiation + file_key = f"{dataset}::{workitem.filename}" + part_key = f"{file_key}::{workitem.entrystart}_{workitem.entrystop}" + + # Get pre-computed file and part numbers + file_number = file_counters[file_key] + part_number = part_counters[part_key] + + # Create output directory structure + base_dir = Path(config.output_dir) + dataset_dir = base_dir / dataset / f"file__{file_number}" + dataset_dir.mkdir(parents=True, exist_ok=True) + + # Create output filename with entry-range-based part number + output_filename = f"part_{part_number}.root" + return dataset_dir / output_filename + + +def 
_save_workitem_output( + events: Any, + output_file: Path, + config: SkimmingConfig, + configuration: Any, + is_mc: bool, +) -> None: + """ + Save filtered events to output ROOT file. + + This function handles the actual I/O of saving skimmed events to disk, + using the same branch selection logic as the existing skimming code. + + Parameters + ---------- + events : Any + Filtered events to save + output_file : Path + Output file path + config : SkimmingConfig + Skimming configuration + configuration : Any + Main analysis configuration with branch selections + is_mc : bool + Whether this is Monte Carlo data + """ + # Build branches to keep using existing logic + branches_to_keep = _build_branches_to_keep(configuration, is_mc) + + # Create output file + with uproot.recreate(str(output_file)) as output_root: + # Prepare data for writing + output_data = {} + + # Extract branches following the existing pattern + for obj, obj_branches in branches_to_keep.items(): + if obj == "event": + # Event-level branches + for branch in obj_branches: + if hasattr(events, branch): + output_data[branch] = getattr(events, branch) + else: + # Object collection branches + if hasattr(events, obj): + obj_collection = getattr(events, obj) + for branch in obj_branches: + if hasattr(obj_collection, branch): + output_data[f"{obj}_{branch}"] = getattr( + obj_collection, branch + ) + + # Create and populate output tree + if output_data: + output_tree = output_root.mktree( + config.tree_name, {k: v.type for k, v in output_data.items()} + ) + output_tree.extend(output_data) + + +def _build_branches_to_keep( + configuration: Any, is_mc: bool +) -> Dict[str, List[str]]: + """ + Build dictionary of branches to keep based on configuration. + + This replicates the logic from the existing skimming code to determine + which branches should be saved in the output files. 
+ + Parameters + ---------- + configuration : Any + Main analysis configuration + is_mc : bool + Whether this is Monte Carlo data + + Returns + ------- + Dict[str, List[str]] + Dictionary mapping object names to lists of branch names + """ + branches = configuration.preprocess.branches + mc_branches = configuration.preprocess.mc_branches + + filtered = {} + for obj, obj_branches in branches.items(): + if not is_mc: + # For data, exclude MC-only branches + filtered[obj] = [ + br for br in obj_branches + if br not in mc_branches.get(obj, []) + ] + else: + # For MC, keep all branches + filtered[obj] = obj_branches + + return filtered + + +class WorkitemSkimmingManager: + """ + Manager for workitem-based skimming using dask.bag processing. + + This class orchestrates the new preprocessing workflow that processes + workitems directly using dask.bag, providing robust failure handling + and retry mechanisms. + + Attributes + ---------- + config : SkimmingConfig + Skimming configuration with selection functions and output settings + """ + + def __init__(self, config: SkimmingConfig): + """ + Initialize the workitem skimming manager. + + Parameters + ---------- + config : SkimmingConfig + Skimming configuration with selection functions and output settings + """ + self.config = config + logger.info("Initialized workitem-based skimming manager") + + def process_workitems( + self, + workitems: List[WorkItem], + configuration: Any, + split_every: int = 4, + max_retries: int = 3, + ) -> Dict[str, Any]: + """ + Process a list of workitems using dask.bag with failure handling. + + This is the main entry point that implements the dask.bag workflow + with retry logic for failed workitems. 
+ + Parameters + ---------- + workitems : List[WorkItem] + List of workitems to process + configuration : Any + Main analysis configuration object + split_every : int, default 4 + Split parameter for dask.bag.fold operation + max_retries : int, default 3 + Maximum number of retry attempts for failed workitems + + Returns + ------- + Dict[str, Any] + Final combined results with histograms and processing statistics + """ + logger.info(f"Processing {len(workitems)} workitems") + + # Pre-compute file and part counters for all workitems + file_counters, part_counters = self._compute_counters(workitems) + + # Initialize accumulator for successful results + full_result = { + "hist": default_histogram(), + "failed_items": set(), + "processed_events": 0, + "output_files": [], + } + + # Process workitems with retry logic + remaining_workitems = workitems.copy() + retry_count = 0 + + while remaining_workitems and retry_count < max_retries: + logger.info( + f"Attempt {retry_count + 1}: processing " + f"{len(remaining_workitems)} workitems" + ) + + # Create dask bag from remaining workitems + bag = dask.bag.from_sequence(remaining_workitems) + + # Map analysis function over workitems + futures = bag.map( + lambda wi: workitem_analysis( + wi, + self.config, + configuration, + file_counters, + part_counters, + is_mc=self._is_monte_carlo(wi.dataset), + ) + ) + + # Reduce results using fold operation + task = futures.fold(reduce_results, split_every=split_every) + + # Compute results + (result,) = dask.compute(task) + + # Update remaining workitems to failed ones + remaining_workitems = list(result["failed_items"]) + + # Accumulate successful results + if result["processed_events"] > 0: + full_result["hist"] += result["hist"] + full_result["processed_events"] += result["processed_events"] + full_result["output_files"].extend(result["output_files"]) + + # Log progress + failed_count = len(remaining_workitems) + successful_count = len(workitems) - failed_count + logger.info( + 
f"Attempt {retry_count + 1} complete: " + f"{successful_count} successful, {failed_count} failed" + ) + + retry_count += 1 + + # Final logging + if remaining_workitems: + logger.warning( + f"Failed to process {len(remaining_workitems)} workitems " + f"after {max_retries} attempts" + ) + full_result["failed_items"] = set(remaining_workitems) + else: + logger.info("All workitems processed successfully") + + # Create summary statistics by dataset + self._log_processing_summary(workitems, full_result["output_files"]) + + return full_result + + def discover_workitem_outputs(self, workitems: List[WorkItem]) -> List[str]: + """ + Discover existing output files from previous workitem processing. + + This method scans for output files that would be created by the + workitem processing, allowing for resumption of interrupted workflows. + + Parameters + ---------- + workitems : List[WorkItem] + List of workitems to check for existing outputs + + Returns + ------- + List[str] + List of existing output file paths with tree names + """ + output_files = [] + dataset_counts = {} + + # Use the same counter computation as processing + file_counters, part_counters = self._compute_counters(workitems) + + for workitem in workitems: + expected_output = _create_output_file_path( + workitem, self.config, file_counters, part_counters + ) + + if expected_output.exists(): + # Add tree name for compatibility with existing code + file_with_tree = f"{expected_output}:{self.config.tree_name}" + output_files.append(file_with_tree) + + # Count files per dataset + dataset = workitem.dataset + dataset_counts[dataset] = dataset_counts.get(dataset, 0) + 1 + + # Log with dataset breakdown + if dataset_counts: + dataset_info = ", ".join([ + f"{dataset}: {count}" for dataset, count in dataset_counts.items() + ]) + logger.info( + f"Found existing skimmed files for {dataset_info}" + ) + else: + logger.info("No existing output files found") + + return output_files + + def _log_processing_summary( + self, 
workitems: List[WorkItem], output_files: List[str] + ) -> None: + """ + Log a summary table of processing results by dataset. + + Parameters + ---------- + workitems : List[WorkItem] + Original list of workitems processed + output_files : List[str] + List of output files created + """ + # Collect statistics by dataset + dataset_stats = defaultdict( + lambda: {"events_processed": 0, "files_written": 0} + ) + + # Count events processed by reading from output files + for output_file in output_files: + try: + # Extract dataset from file path + # Path format: {output_dir}/{dataset}/file__{N}/part_{M}.root + path_parts = Path(output_file).parts + if len(path_parts) >= 3: + dataset = path_parts[-3] # Get dataset from path + + # Read the file to count events + with uproot.open(output_file) as f: + if self.config.tree_name in f: + tree = f[self.config.tree_name] + num_events = tree.num_entries + dataset_stats[dataset]["events_processed"] += num_events + dataset_stats[dataset]["files_written"] += 1 + + except Exception as e: + # Still count the file even if we can't read events + try: + path_parts = Path(output_file).parts + if len(path_parts) >= 3: + dataset = path_parts[-3] + dataset_stats[dataset]["files_written"] += 1 + except Exception: + pass + + # Create summary table + if dataset_stats: + table_data = [] + total_events = 0 + total_files = 0 + + for dataset, stats in sorted(dataset_stats.items()): + events = stats["events_processed"] + files = stats["files_written"] + table_data.append([dataset, f"{events:,}", files]) + total_events += events + total_files += files + + # Add totals row + table_data.append(["TOTAL", f"{total_events:,}", total_files]) + + # Create and log table + headers = ["Dataset", "Events Saved", "Files Written"] + table = tabulate(table_data, headers=headers, tablefmt="grid") + + logger.info("Processing Summary:") + logger.info(f"\n{table}") + else: + logger.info("No output files were created during processing") + + def _compute_counters( + self, 
workitems: List[WorkItem] + ) -> tuple[Dict[str, int], Dict[str, int]]: + """ + Pre-compute file and part counters for all workitems. + + This ensures consistent numbering across all workers by computing + the counters once before parallel processing begins. + + Parameters + ---------- + workitems : List[WorkItem] + List of all workitems to process + + Returns + ------- + tuple[Dict[str, int], Dict[str, int]] + File counters and part counters dictionaries + """ + file_counters = {} + part_counters = {} + + # Track unique files per dataset for sequential file numbering + dataset_file_counts = {} + + for workitem in workitems: + dataset = workitem.dataset + file_key = f"{dataset}::{workitem.filename}" + part_key = f"{file_key}::{workitem.entrystart}_{workitem.entrystop}" + + # Assign file number if not already assigned + if file_key not in file_counters: + if dataset not in dataset_file_counts: + dataset_file_counts[dataset] = 0 + file_counters[file_key] = dataset_file_counts[dataset] + dataset_file_counts[dataset] += 1 + + # Assign part number if not already assigned + if part_key not in part_counters: + # Count existing parts for this file + existing_parts = [ + k for k in part_counters.keys() if k.startswith(f"{file_key}::") + ] + part_counters[part_key] = len(existing_parts) + + return file_counters, part_counters + + def _is_monte_carlo(self, dataset: str) -> bool: + """ + Determine if a dataset represents Monte Carlo data. 
+
+        Parameters
+        ----------
+        dataset : str
+            Dataset name
+
+        Returns
+        -------
+        bool
+            True if dataset is Monte Carlo, False if it's data
+        """
+        # Simple heuristic: data datasets typically contain "data" in the name
+        return "data" not in dataset.lower()
+
+
+def process_workitems_with_skimming(
+    workitems: List[WorkItem],
+    config: Any,
+    fileset: Optional[Dict[str, Any]] = None,
+    nanoaods_summary: Optional[Dict[str, Any]] = None,
+    cache_dir: str = "/tmp/gradients_analysis/",
+) -> Dict[str, Any]:
+    """
+    Process workitems using the workitem-based skimming approach with event
+    merging and caching.
+
+    This function serves as the main entry point for the workitem-based
+    preprocessing workflow. It processes workitems (if skimming is enabled)
+    and then discovers, merges, and caches events from the saved files for
+    analysis. Events from multiple output files per dataset are automatically
+    merged into a single NanoEvents object for improved performance and memory
+    efficiency.
+
+    Parameters
+    ----------
+    workitems : List[WorkItem]
+        List of workitems to process, typically from NanoAODMetadataGenerator.workitems
+    config : Any
+        Main analysis configuration object containing skimming and preprocessing settings
+    fileset : Optional[Dict[str, Any]], default None
+        Fileset containing metadata including cross-sections for normalization
+    nanoaods_summary : Optional[Dict[str, Any]], default None
+        NanoAODs summary containing event counts per dataset for nevts metadata
+    cache_dir : str, default "/tmp/gradients_analysis/"
+        Directory for caching merged events. Cached files use the pattern:
+        {cache_dir}/{dataset}__{hash}.pkl where hash is based on input file paths
+
+    Returns
+    -------
+    Dict[str, Any]
+        Dictionary mapping dataset names to lists containing a single (events, metadata) tuple.
+        Events are merged NanoEvents objects from all output files for the dataset.
+ Each metadata dictionary contains dataset, process, variation, and xsec information. + """ + logger.info(f"Starting workitem preprocessing with {len(workitems)} workitems") + + # Create workitem skimming manager + skimming_manager = WorkitemSkimmingManager(config.preprocess.skimming) + + # Group workitems by dataset + workitems_by_dataset = {} + for workitem in workitems: + dataset = workitem.dataset + if dataset not in workitems_by_dataset: + workitems_by_dataset[dataset] = [] + workitems_by_dataset[dataset].append(workitem) + + # Process workitems if skimming is enabled + if config.general.run_skimming: + logger.info("Running skimming") + results = skimming_manager.process_workitems(workitems, config) + logger.info(f"Skimming complete: {results['processed_events']:,} events") + + # Always discover and read from saved files + logger.info("Reading from saved files") + processed_datasets = {} + + for dataset, dataset_workitems in workitems_by_dataset.items(): + # Skip datasets not explicitly requested in config + if hasattr(config.general, 'processes') and config.general.processes: + process_name = dataset.split('__')[0] if '__' in dataset else dataset + if process_name not in config.general.processes: + logger.info(f"Skipping {dataset} (not requested)") + continue + + # Discover output files for this dataset + output_files = skimming_manager.discover_workitem_outputs(dataset_workitems) + + if output_files: + # Create metadata for compatibility with existing analysis code + metadata = { + "dataset": dataset, + "process": dataset.split('__')[0] if '__' in dataset else dataset, + "variation": dataset.split('__')[1] if '__' in dataset else "nominal", + } + + # Add cross-section metadata from fileset if available + if fileset and dataset in fileset: + xsec = fileset[dataset].get('metadata', {}).get('xsec', 1.0) + metadata['xsec'] = xsec + else: + metadata['xsec'] = 1.0 + if fileset: + logger.warning(f"No cross-section for {dataset}, using 1.0") + + # Add nevts from 
NanoAODs summary if available + # The analysis code expects 'nevts' field for normalization + nevts = 0 + if nanoaods_summary: + # Parse dataset to get process and variation + process_name = dataset.split('__')[0] if '__' in dataset else dataset + variation = dataset.split('__')[1] if '__' in dataset else "nominal" + + if process_name in nanoaods_summary: + if variation in nanoaods_summary[process_name]: + nevts = nanoaods_summary[process_name][variation].get( + 'nevts_total', 0 + ) + + metadata['nevts'] = nevts + if nevts == 0: + logger.warning(f"No nevts found for {dataset}, using 0") + + # Create cache key for the merged dataset + # Use sorted file paths to ensure consistent cache key + sorted_files = sorted(output_files) + cache_input = f"{dataset}::{':'.join(sorted_files)}" + cache_key = hashlib.md5(cache_input.encode()).hexdigest() + cache_file = os.path.join(cache_dir, f"{dataset}__{cache_key}.pkl") + + # Check if we should read from cache + if config.general.read_from_cache and os.path.exists(cache_file): + logger.info(f"Loading cached events for {dataset}") + try: + with open(cache_file, "rb") as f: + merged_events = cloudpickle.load(f) + logger.info(f"Loaded {len(merged_events)} cached events") + processed_datasets[dataset] = [(merged_events, metadata.copy())] + continue # Skip to next dataset + except Exception as e: + logger.error(f"Failed to load cached events for {dataset}: {e}") + # Fall back to loading from files + + # Load and merge events from all discovered files + all_events = [] + total_events_loaded = 0 + + for file_path in output_files: + try: + # Load events using NanoEventsFactory + events = NanoEventsFactory.from_root( + file_path, schemaclass=NanoAODSchema, mode="eager" + ).events() + events = ak.materialize(events) + all_events.append(events) + total_events_loaded += len(events) + except Exception as e: + logger.error(f"Failed to load events from {file_path}: {e}") + continue + + # Merge all events into a single array if we have any 
events + if all_events: + try: + if len(all_events) == 1: + # Single file, no need to concatenate + merged_events = all_events[0] + else: + # Multiple files, concatenate them + merged_events = ak.concatenate(all_events, axis=0) + + logger.info( + f"Merged {len(output_files)} files → " + f"{len(merged_events)} events for {dataset}" + ) + + # Cache the merged events + try: + os.makedirs(cache_dir, exist_ok=True) + with open(cache_file, "wb") as f: + cloudpickle.dump(merged_events, f) + logger.info(f"Cached events for {dataset}") + except Exception as e: + logger.warning(f"Failed to cache events for {dataset}: {e}") + + processed_datasets[dataset] = [(merged_events, metadata.copy())] + + except Exception as e: + logger.error(f"Failed to merge events for {dataset}: {e}") + # Fallback to individual events if merging fails + processed_events = [] + for i, events in enumerate(all_events): + processed_events.append((events, metadata.copy())) + processed_datasets[dataset] = processed_events + else: + logger.warning(f"No output files found for {dataset}") + processed_datasets[dataset] = [] + + return processed_datasets diff --git a/utils/tools.py b/utils/tools.py index 761f9e6..46332fd 100644 --- a/utils/tools.py +++ b/utils/tools.py @@ -1,8 +1,10 @@ +import logging from collections import defaultdict -from typing import Any, Mapping, Sequence +from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple import awkward as ak +logger = logging.getLogger(__name__) def nested_defaultdict_to_dict(nested_structure: Any) -> dict: """ @@ -70,3 +72,56 @@ def recursive_to_backend(data_structure: Any, backend: str = "jax") -> Any: else: # Leave unchanged if not an Awkward structure return data_structure + + +def get_function_arguments( + arg_spec: List[Tuple[str, Optional[str]]], + objects: Dict[str, ak.Array], + function_name: Optional[str] = "generic_function" +) -> List[ak.Array]: + """ + Prepare function arguments from object dictionary. 
+ + Parameters + ---------- + arg_spec : List[Tuple[str, Optional[str]]] + List of (object, field) specifications + objects : Dict[str, ak.Array] + Object dictionary + function_name : Optional[str] + Name of function for error reporting + + Returns + ------- + List[ak.Array] + Prepared arguments + """ + def raise_error(field_name: str) -> None: + """ + Raise KeyError if object is missing in objects dictionary. + + Parameters + ---------- + field_name : str + Missing field name + """ + logger.error( + f"Field '{field_name}' needed for {function_name} " + f"is not found in objects dictionary" + ) + raise KeyError(f"Missing field: {field_name}, function: {function_name}") + + args = [] + for obj_name, field_name in arg_spec: + if field_name: + try: + args.append(objects[obj_name][field_name]) + except KeyError: + raise_error(f"{obj_name}.{field_name}") + else: + try: + args.append(objects[obj_name]) + except KeyError: + raise_error(obj_name) + + return args