From ae18b27f4fd4ea8c462719418ee74e238713a1af Mon Sep 17 00:00:00 2001 From: Peter Eduard Meiring Date: Wed, 18 Jun 2025 23:54:52 +0200 Subject: [PATCH 1/5] enable 2025 eras, disable offline dqm plotters --- automation/config.yaml | 40 +++++++++++++++++++++------------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/automation/config.yaml b/automation/config.yaml index 3b870c4..ab49319 100644 --- a/automation/config.yaml +++ b/automation/config.yaml @@ -1,24 +1,25 @@ -JetMET: - datasets: - - 'JetMET0' - - 'JetMET1' - eras: - - 'Run2024*' - scripts: - - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_DiJet.root -c DiJet' - plotting: - - 'python3 ../plotting/make_DiJet_plots.py --dir $OUTDIR --config ../config_cards/full_DiJet.yaml' +# JetMET: +# datasets: +# - 'JetMET0' +# - 'JetMET1' +# eras: +# - 'Run2024*' +# scripts: +# - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_DiJet.root -c DiJet' +# plotting: +# - 'python3 ../plotting/make_DiJet_plots.py --dir $OUTDIR --config ../config_cards/full_DiJet.yaml' EGamma: datasets: - 'EGamma0' - 'EGamma1' eras: - - 'Run2024*' + - 'Run2024I' + - 'Run2025*' scripts: - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_PhotonJet.root -c PhotonJet' - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToEE.root -c ZToEE' - - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/oug_zee_dqmoff.root -c ZToEEDQMOff' + # - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/oug_zee_dqmoff.root -c ZToEEDQMOff' plotting: - 'python3 ../plotting/make_ZToEE_plots.py --dir $OUTDIR --config ../config_cards/full_ZToEE.yaml' - 'python3 ../plotting/make_PhotonJet_plots.py --dir $OUTDIR --config ../config_cards/full_PhotonJet.yaml' @@ -28,16 +29,17 @@ Muon: - 'Muon0' - 'Muon1' eras: - - 'Run2024*' + - 'Run2024I' + - 'Run2025*' scripts: - - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToMuMu.root -c ZToMuMu' + # - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToMuMu.root -c ZToMuMu' - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_MuonJet.root -c MuonJet' #TODO not working - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToTauTau.root -c ZToTauTau' - - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_zmumu_dqmoffl.root -c ZToMuMuDQMOff' - - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_jets_dqmoff.root -c JetsDQMOff' - - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_ztautau_dqmoff.root -c ZToTauTauDQMOff' - - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_etsum_dqmoff.root -c EtSumDQMOff' + # - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_zmumu_dqmoffl.root -c ZToMuMuDQMOff' + # - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_jets_dqmoff.root -c JetsDQMOff' + # - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_ztautau_dqmoff.root -c ZToTauTauDQMOff' + # - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_etsum_dqmoff.root -c EtSumDQMOff' plotting: - - 'python3 ../plotting/make_ZToMuMu_plots.py --dir $OUTDIR --config ../config_cards/full_ZToMuMu.yaml' + # - 'python3 ../plotting/make_ZToMuMu_plots.py --dir $OUTDIR --config ../config_cards/full_ZToMuMu.yaml' - 'python3 ../plotting/make_ZToTauTau_plots.py --dir $OUTDIR --config 
../config_cards/full_ZToTauTau.yaml' - 'python3 ../plotting/make_MuonJet_plots.py --dir $OUTDIR --config ../config_cards/full_MuonJet.yaml' \ No newline at end of file From 23dbac690e1e218962b270aab397da4aa16f94ad Mon Sep 17 00:00:00 2001 From: Peter Eduard Meiring Date: Thu, 19 Jun 2025 00:21:15 +0200 Subject: [PATCH 2/5] enable automatic condor jobs and fix 2025 weekly plots --- automation/cron_job.sh | 57 +++++++++++++++++++++++++--- automation/merge_per_era.py | 6 +-- automation/utils.py | 76 ++++++++++++++++++++++++++++++++++++- 3 files changed, 129 insertions(+), 10 deletions(-) diff --git a/automation/cron_job.sh b/automation/cron_job.sh index 53ae864..7a8dad5 100755 --- a/automation/cron_job.sh +++ b/automation/cron_job.sh @@ -1,8 +1,55 @@ #!/bin/bash -# python3 make_hists.py -# python3 merge_per_run.py -# python3 merge_per_era.py -# python3 merge_total.py -# python3 make_plots.py + +LOCKFILE="cron.lock" +LOGFILE="cron.log" + +# Exit if lock file exists +if [ -e "$LOCKFILE" ]; then + # echo "Lock file exists: another job is running." $(date) + exit 1 +fi + +# Define a cleanup function in case of exit +function cleanup() { + rm -f "$LOCKFILE" + rm -f "$LOGFILE" +} + +# Define a waiting function for condor jobs to finish +function waitForCondor() { + while true; do + out=$(condor_q 2>&1) + echo "$out" | grep -q "Failed to fetch ads" && { echo "[waitForCondor] condor_q failed, retrying..."; sleep 60; continue; } + echo "$out" | grep -q "$1" || { echo "[waitForCondor] Job $1 finished."; break; } + sleep 60 + done +} + +# Create lock file - cron_job is in running state +touch "$LOCKFILE" +trap cleanup EXIT + +# Submit histomaker to condor and wait for jobs to finish +echo "Filling histograms..." +python3 make_hists.py --htcondor +condor_submit submit.txt | tee submitinfo +cluster=$(cat submitinfo | grep "submitted to cluster" | sed "s/.*cluster //; s/\.//") +waitForCondor $cluster + +# Merge files locally +echo "Merging files..." +python3 merge_per_run.py +python3 -c 'from utils import generate_weekDict; generate_weekDict()' +cp week_runs.csv /eos/user/p/pmeiring/www/L1Trigger/l1dpg/DQM/Weekly/week_runs.txt +python3 merge_per_era.py +python3 merge_total.py + +# Submit plotmaker to condor and wait for jobs to finish +echo "Producing plots..." +python3 make_plots.py --htcondor +condor_submit submit.txt | tee submitinfo +cluster=$(cat submitinfo | grep "submitted to cluster" | sed "s/.*cluster //; s/\.//") +waitForCondor $cluster + date echo "All done!" 
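A note on the submit-then-wait pattern above: the sed call extracts the cluster id from condor_submit's "N job(s) submitted to cluster NNNN." line, and waitForCondor then polls condor_q once a minute, treating "Failed to fetch ads" as a transient scheduler error rather than job completion. A minimal Python sketch of the same loop, assuming condor_submit and condor_q are on PATH (the function name submit_and_wait is illustrative, not part of this patch):

    import re
    import subprocess
    import time

    def submit_and_wait(submit_file="submit.txt", poll=60):
        # condor_submit prints e.g. "20 job(s) submitted to cluster 4242."
        out = subprocess.run(["condor_submit", submit_file],
                             capture_output=True, text=True, check=True).stdout
        cluster = re.search(r"submitted to cluster (\d+)", out).group(1)
        while True:
            q = subprocess.run(["condor_q"], capture_output=True, text=True)
            text = q.stdout + q.stderr
            if "Failed to fetch ads" in text:
                print("[submit_and_wait] condor_q failed, retrying...")
            elif cluster not in text:
                print(f"[submit_and_wait] cluster {cluster} finished.")
                return
            time.sleep(poll)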
\ No newline at end of file diff --git a/automation/merge_per_era.py b/automation/merge_per_era.py index 87acafa..714965f 100644 --- a/automation/merge_per_era.py +++ b/automation/merge_per_era.py @@ -1,14 +1,15 @@ #!/bin/python3 from glob import glob -from utils import hadd, get_weeks, htcondor_flag, dqm_prefix +from utils import hadd, get_weeks, get_weeks_v2, htcondor_flag, dqm_prefix htcondor = htcondor_flag() # collect all histogram root files merged by run all_files = glob(f"{dqm_prefix}/*/*/*/*/*/merged/*.root") #change later to dqm_prefix -weeks = get_weeks() +# weeks = get_weeks() +weeks = get_weeks_v2() # group files by week and era file_groups = {} @@ -33,7 +34,6 @@ file_groups[target] = [] file_groups[target].append(file) - # Hadd grouped files for target, files in file_groups.items(): hadd(target, files, htcondor) diff --git a/automation/utils.py b/automation/utils.py index 66cc5b5..21b6798 100644 --- a/automation/utils.py +++ b/automation/utils.py @@ -1,8 +1,10 @@ -import os, subprocess, argparse, uproot +import os, subprocess, argparse, uproot, re, csv import pandas as pd +from datetime import datetime +from collections import defaultdict #dqm_prefix = '/eos/cms/store/group/dpg_trigger/comm_trigger/L1Trigger/cmsl1dpg/www/DQM/T0PromptNanoMonit' -dqm_prefix = "/eos/user/l/lebeling/www/DQM" +dqm_prefix = "/eos/user/p/pmeiring/www/L1Trigger/l1dpg/DQM" tier0 = "/eos/cms/tier0/store/data" @@ -56,6 +58,76 @@ def get_weeks(year=2024): return weeks +def get_weeks_v2(csv_path="run_weeks.csv"): + run_week_dict = {} + try: + with open(csv_path, newline='') as csvfile: + reader = csv.DictReader(csvfile) + for row in reader: + try: + run = int(row["RunNumber"]) + full_week = row["ISO_Week"] + # Extract just the week number (e.g., '2025-W22' -> '22') + week_only = full_week.split("-W")[1] + run_week_dict[run] = week_only + except (ValueError, KeyError, IndexError): + continue # Skip malformed rows + except FileNotFoundError: + print(f"File not found: {csv_path}") + except Exception as e: + print(f"Error reading CSV: {e}") + + return run_week_dict + +def generate_weekDict(year=2025): + run_week_dict = {} + week_run_dict = defaultdict(list) + + for era in os.listdir(tier0): + if not era.startswith(f"Run{year}"): + continue + + era_path = os.path.join(tier0, era, "L1Accept/RAW/v1/000/") + if not os.path.isdir(era_path): + continue + + for root, dirs, files in os.walk(era_path): + if not root.endswith("/00000"): + continue + + match = re.search(r"/(\d{3})/(\d{3})/00000$", root) + if not match: + continue + + run_number = int(match.group(1) + match.group(2)) + + # Get parent directory of "00000", i.e., the run directory + run_dir = os.path.dirname(root) + + try: + dir_stat = os.stat(run_dir) + # Use st_mtime (last modification time) or st_ctime (creation time on some systems) + timestamp = dir_stat.st_mtime + dt = datetime.fromtimestamp(timestamp) + iso_week = dt.strftime("%G-W%V") + run_week_dict[run_number] = iso_week + week_run_dict[iso_week].append(run_number) + except Exception: + continue # Skip if stats can't be read + + with open("run_weeks.csv", "w", newline="") as f: + writer = csv.writer(f) + writer.writerow(["RunNumber", "ISO_Week"]) + for run, week in sorted(run_week_dict.items()): + writer.writerow([run, week]) + + with open("week_runs.csv", "w", newline="") as f: + writer = csv.writer(f) + writer.writerow(["ISO_Week", "RunNumbers"]) + for week, runs in sorted(week_run_dict.items()): + run_list_str = " ".join(str(r) for r in sorted(runs)) + writer.writerow([week, run_list_str]) + 
def hadd(target, files, htcondor = False): os.makedirs(os.path.dirname(target), exist_ok=True) From b451cf2d8ebb2b272c4e26dadd3ff9a335aeb67b Mon Sep 17 00:00:00 2001 From: Peter Eduard Meiring Date: Thu, 19 Jun 2025 00:23:00 +0200 Subject: [PATCH 3/5] fix jetID and use lowest unprescaled isophoton path for 2025 PhotonJet --- helpers/helper_nano.py | 22 ++++++++++++---------- l1macros/performances_nano.py | 2 ++ 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/helpers/helper_nano.py b/helpers/helper_nano.py index 0c57026..95017b1 100644 --- a/helpers/helper_nano.py +++ b/helpers/helper_nano.py @@ -7,6 +7,7 @@ from bins import * response_bins = array('f',[0.+float(i)/100. for i in range(200)] ) +photonpt_bins = array('f',[0.+float(i)*10. for i in range(50)] ) runnb_bins = None @@ -61,7 +62,7 @@ def make_filter(golden_json): } for(unsigned int i = 0;i< (Jet_pt).size();i++ ){ cout << "jet Pt, Eta, Phi: " << (Jet_pt)[i]<<", "<<(Jet_eta)[i]<<", "<<(Jet_phi)[i]<20 GeV. The event must pass a photon trigger. ''' -# df = df.Filter('PuppiMET_pt<50') - df = df.Filter('HLT_Photon110EB_TightID_TightIso') + df = df.Filter('PuppiMET_pt<50') + df = df.Filter('HLT_Photon45EB_TightID_TightIso') df = df.Define('photonsptgt20','Photon_pt>20') df = df.Filter('Sum(photonsptgt20)==1','=1 photon with p_{T}>20 GeV') - df = df.Define('isRefPhoton','Photon_mvaID_WP80&&Photon_electronVeto&&Photon_pt>115&&abs(Photon_eta)<1.479') + df = df.Define('isRefPhoton','Photon_mvaID_WP80&&Photon_electronVeto&&Photon_pt>20&&abs(Photon_eta)<1.479') + df = df.Filter('Sum(isRefPhoton)==1','Photon has p_{T}>115 GeV, passes tight ID and is in EB') df = df.Define('cleanPh_Pt','Photon_pt[isRefPhoton]') @@ -338,7 +340,7 @@ def DiJetSelection(df): ''' df = df.Filter('HLT_AK8PFJet500&&PuppiMET_pt<300') - df = df.Define('isHighPtJet','Jet_jetId>=6&&Jet_pt>500&&Jet_muEF<0.5&&Jet_chEmEF<0.5&&Jet_neEmEF<0.8') + df = df.Define('isHighPtJet','Jet_passJetIdTight&&Jet_passJetIdTightLepVeto&&Jet_pt>500&&Jet_muEF<0.5&&Jet_chEmEF<0.5&&Jet_neEmEF<0.8') df = df.Filter('Sum(isHighPtJet)==2','=2 jets with pt>500 GeV') df = df.Filter('isHighPtJet[0]&&isHighPtJet[1]','First 2 jets are the cleaned jets') @@ -637,8 +639,7 @@ def L1ETMHF(df): def CleanJets(df): #List of cleaned jets (noise cleaning + lepton/photon overlap removal) - df = df.Define('_jetPassID', 'Jet_jetId>=4') - df = df.Define('isCleanJet','_jetPassID&&Jet_pt>30&&Jet_muEF<0.5&&Jet_chEmEF<0.5') + df = df.Define('isCleanJet','Jet_passJetIdTight&&Jet_passJetIdTightLepVeto&&Jet_pt>30&&Jet_muEF<0.5&&Jet_chEmEF<0.5') df = df.Define('cleanJet_Pt','Jet_pt[isCleanJet]') df = df.Define('cleanJet_Eta','Jet_eta[isCleanJet]') df = df.Define('cleanJet_Phi','Jet_phi[isCleanJet]') @@ -732,12 +733,12 @@ def EtSum(df, suffix = ''): histos['HLT_PFMETNoMu120_PFMHTNoMu120_IDTight_DiJet140_70_Mjj900'+suffix] = df.Filter('HLT_PFMETNoMu120_PFMHTNoMu120_IDTight&&vbf_selection').Histo1D(ROOT.RDF.TH1DModel('h_HLT_PFMETNoMu120_PFMHTNoMu120_IDTight_DiJet140_70_Mjj900'+suffix, '', len(jetmetpt_bins)-1, array('d',jetmetpt_bins)), 'MetNoMu') # VBF (Met + jet) trigger - histos['HLT_DiJet110_35_Mjj650_PFMET110_DiJet140_70_Mjj900'+suffix] = df.Filter('HLT_DiJet110_35_Mjj650_PFMET110&&vbf_selection').Histo1D(ROOT.RDF.TH1DModel('h_HLT_DiJet110_35_Mjj650_PFMET110_DiJet140_70_Mjj900'+suffix, '', len(jetmetpt_bins)-1, array('d',jetmetpt_bins)), 'MetNoMu') + # histos['HLT_DiJet110_35_Mjj650_PFMET110_DiJet140_70_Mjj900'+suffix] = 
df.Filter('HLT_DiJet110_35_Mjj650_PFMET110&&vbf_selection').Histo1D(ROOT.RDF.TH1DModel('h_HLT_DiJet110_35_Mjj650_PFMET110_DiJet140_70_Mjj900'+suffix, '', len(jetmetpt_bins)-1, array('d',jetmetpt_bins)), 'MetNoMu') # VBF trigger if max(runnb_bins) > 367661: histos['h_MetNoMu_Denominator_VBF_DiJet70_40_500'+suffix] = df.Filter('run>367661').Filter('HLT_VBF_filter').Histo1D(ROOT.RDF.TH1DModel('h_MetNoMu_Denominator_VBF_DiJet70_40_500'+suffix, '', len(jetmetpt_bins)-1, array('d',jetmetpt_bins)), 'MetNoMu') - histos['HLT_VBF_DiPFJet75_40_Mjj500_Detajj2p5_PFMET85'+suffix] = df.Filter('run>367661').Filter('HLT_VBF_filter&&HLT_VBF_DiPFJet75_40_Mjj500_Detajj2p5_PFMET85').Histo1D(ROOT.RDF.TH1DModel('HLT_VBF_DiPFJet75_40_Mjj500_Detajj2p5_PFMET85'+suffix, '', len(jetmetpt_bins)-1, array('d',jetmetpt_bins)), 'MetNoMu') + # histos['HLT_VBF_DiPFJet75_40_Mjj500_Detajj2p5_PFMET85'+suffix] = df.Filter('run>367661').Filter('HLT_VBF_filter&&HLT_VBF_DiPFJet75_40_Mjj500_Detajj2p5_PFMET85').Histo1D(ROOT.RDF.TH1DModel('HLT_VBF_DiPFJet75_40_Mjj500_Detajj2p5_PFMET85'+suffix, '', len(jetmetpt_bins)-1, array('d',jetmetpt_bins)), 'MetNoMu') return df, histos @@ -941,6 +942,7 @@ def AnalyzePtBalance(df, suffix = ''): histos['L1JetvsPU'+str_bineta+suffix] = df_JetsBinnedInEta[str_bineta].Histo2D(ROOT.RDF.TH2DModel('h_L1PtBalanceVsPU_{}'.format(str_bineta)+suffix, 'ptbalanceL1', 100, 0, 100, 100, 0, 2), 'PV_npvs','ptbalanceL1') # only one jet with pT > 30 GeV histos['L1JetvsRunNb_singlejet'+str_bineta+suffix] = df_JetsBinnedInEta[str_bineta].Filter('Sum(isCleanJet)==1','==1 clean jet with p_{T}>30 GeV').Histo2D(ROOT.RDF.TH2DModel('h_L1PtBalanceVsRunNb_singlejet_{}'.format(str_bineta)+suffix, 'ptbalanceL1', len(runnb_bins)-1, runnb_bins, len(response_bins)-1, response_bins), 'run','ptbalanceL1') + histos['L1JetvsPhotonPt_singlejet'+str_bineta+suffix] = df_JetsBinnedInEta[str_bineta].Filter('Sum(isCleanJet)==1','==1 clean jet with p_{T}>30 GeV').Histo2D(ROOT.RDF.TH2DModel('h_L1PtBalanceVsPhotonPt_singlejet_{}'.format(str_bineta)+suffix, 'ptbalanceL1', len(photonpt_bins)-1, photonpt_bins, len(response_bins)-1, response_bins), 'ref_Pt','ptbalanceL1') return df, histos diff --git a/l1macros/performances_nano.py b/l1macros/performances_nano.py index a69fdd3..05fce16 100644 --- a/l1macros/performances_nano.py +++ b/l1macros/performances_nano.py @@ -103,6 +103,8 @@ def main(): if not 'L1_UnprefireableEvent_FirstBxInTrain' in df.GetColumnNames(): df = df.Define('L1_UnprefireableEvent_FirstBxInTrain','return false;') + df = df.Define('Jet_passJetIdTight', '((abs(Jet_eta)<=2.6)&&(Jet_neHEF < 0.99)&&(Jet_neEmEF<0.9)&&(Jet_chMultiplicity+Jet_neMultiplicity>1)&&(Jet_chHEF>0.01)&&(Jet_chMultiplicity>0)) || ((abs(Jet_eta)>2.6&&abs(Jet_eta)<=2.7)&&(Jet_neHEF<0.90)&&(Jet_neEmEF < 0.99)) || ((abs(Jet_eta)>2.7&&abs(Jet_eta)<=3.0)&&(Jet_neHEF < 0.99)) || ((abs(Jet_eta)>3.0)&&(Jet_neMultiplicity>=2)&&(Jet_neEmEF<0.4))') + df = df.Define('Jet_passJetIdTightLepVeto', '((abs(Jet_eta)<=2.7)&&Jet_passJetIdTight&&(Jet_muEF<0.8)&&(Jet_chEmEF<0.8)) || ((abs(Jet_eta)>2.7)&&Jet_passJetIdTight)') print('There are {} events'.format(nEvents)) From c569eb2812a9dac9586d5728a5166cdd2d263e76 Mon Sep 17 00:00:00 2001 From: Peter Eduard Meiring Date: Thu, 19 Jun 2025 00:24:41 +0200 Subject: [PATCH 4/5] small restructuring of output directories, and add photon/jet response plot --- automation/make_plots.py | 6 ++++-- plotting/make_MuonJet_plots.py | 2 +- plotting/make_PhotonJet_plots.py | 17 ++++++++++++++++- plotting/make_ZToEE_plots.py | 2 +- 
plotting/make_ZToMuMu_plots.py | 2 +- plotting/make_ZToTauTau_plots.py | 2 +- 6 files changed, 24 insertions(+), 7 deletions(-) diff --git a/automation/make_plots.py b/automation/make_plots.py index fd2237c..57d570a 100644 --- a/automation/make_plots.py +++ b/automation/make_plots.py @@ -19,7 +19,7 @@ # abort plotting if all .png files are newer than all .root files t_newest, t_oldest = 0, 0 root_files = glob(f"{merged_dir}/*.root") - png_files = glob(f"{merged_dir}/plotsL1Run3/*.png") + png_files = glob(f"{merged_dir}/*/*.png") if len(root_files) > 0: t_newest = max(os.path.getctime(f) for f in root_files) if len(png_files) > 0: t_oldest = min(os.path.getctime(f) for f in png_files) if t_oldest > t_newest: @@ -27,8 +27,10 @@ continue for cmd in config["plotting"]: + plotdir="MuonJet" if "MuonJet" in cmd else "ZToTauTau" if "ZToTauTau" in cmd else "PhotonJet" if "PhotonJet" in cmd else "ZToEE" print(80*"#"+'\n'+f"plotting for {merged_dir}") - os.makedirs(merged_dir + '/plotsL1Run3', exist_ok=True) + os.makedirs(merged_dir + '/' + plotdir, exist_ok=True) + os.system('cp /eos/user/p/pmeiring/www/L1Trigger/00_index.php %s/%s/index.php'%(merged_dir,plotdir)) cmd = cmd.replace("$OUTDIR", merged_dir) print(cmd) if htcondor: write_queue(cmd) diff --git a/plotting/make_MuonJet_plots.py b/plotting/make_MuonJet_plots.py index 27e84ac..85d936b 100644 --- a/plotting/make_MuonJet_plots.py +++ b/plotting/make_MuonJet_plots.py @@ -1,5 +1,5 @@ eventselection='#mu+jet' -subfolder='/plotsL1Run3' +subfolder='/MuonJet' channelname='MuonJet' import yaml diff --git a/plotting/make_PhotonJet_plots.py b/plotting/make_PhotonJet_plots.py index 072f6db..eec275c 100644 --- a/plotting/make_PhotonJet_plots.py +++ b/plotting/make_PhotonJet_plots.py @@ -1,5 +1,5 @@ eventselection='#gamma+jet' -subfolder='/plotsL1Run3' +subfolder='/PhotonJet' channelname='PhotonJet' import yaml @@ -754,5 +754,20 @@ def main(): plotname = channelname+'_L1Jet_PtBalancevsRun_singlejet', ) + drawplots.makeprof( + inputFiles_list = [input_file], + saveplot = True, + dirname = args.dir + subfolder, + nvtx_suffix = s, + h2d = ['h_L1PtBalanceVsPhotonPt_singlejet_{}'.format(eta_range) for eta_range in eta_ranges], + xtitle = 'p_{T}^{reco #gamma}', + ytitle = '(p_{T}^{L1 jet}/p_{T}^{reco #gamma})', + extralabel = "#splitline{"+eventselection+", MET<50 GeV}{= 1 clean jet, #Delta#phi(#gamma, jet) > 2.9}", + legendlabels = eta_labels, + top_label = toplabel, + axisranges = [0, 500, 0, 1.5], + plotname = channelname+'_L1Jet_PtBalancevsPhotonPt_singlejet', + ) + if __name__ == '__main__': main() diff --git a/plotting/make_ZToEE_plots.py b/plotting/make_ZToEE_plots.py index f9e210c..9b9061c 100644 --- a/plotting/make_ZToEE_plots.py +++ b/plotting/make_ZToEE_plots.py @@ -1,6 +1,6 @@ # make_ZToEE_plots.py, a program to draw the L1Studies plots obtained from the histograms extracted from NanoAOD eventselection='Z#rightarrow ee' -subfolder='/plotsL1Run3' +subfolder='/ZToEE' channelname='ZToEE' import yaml diff --git a/plotting/make_ZToMuMu_plots.py b/plotting/make_ZToMuMu_plots.py index 849cd4b..428a4e2 100644 --- a/plotting/make_ZToMuMu_plots.py +++ b/plotting/make_ZToMuMu_plots.py @@ -1,6 +1,6 @@ # make_mu_plots.py, a program to draw the L1Studies plots obtained from the histograms extracted from NanoAOD eventselection='Z#rightarrow #mu#mu' -subfolder='/plotsL1Run3' +subfolder='/ZToMuMu' channelname='ZToMuMu' import yaml diff --git a/plotting/make_ZToTauTau_plots.py b/plotting/make_ZToTauTau_plots.py index 5d10935..1afc7e0 100644 --- 
a/plotting/make_ZToTauTau_plots.py +++ b/plotting/make_ZToTauTau_plots.py @@ -1,5 +1,5 @@ eventselection='#mu+#tau_{h}' -subfolder='/plotsL1Run3' +subfolder='/ZToTauTau' channelname='ZToTauTau' import yaml import drawplots From 0ff06e2bece12796ddb595075bbbbc7259821d50 Mon Sep 17 00:00:00 2001 From: Peter Eduard Meiring Date: Thu, 19 Jun 2025 00:27:56 +0200 Subject: [PATCH 5/5] enable indexing and processing files from DAS (instead of T0) --- automation/make_hists.py | 21 +++++++++++++++++---- automation/submit.txt | 20 +++++++++++++++----- automation/wrapper.py | 15 ++++++++------- l1macros/getDASfiles.sh | 28 ++++++++++++++++++++++++++++ 4 files changed, 68 insertions(+), 16 deletions(-) create mode 100644 l1macros/getDASfiles.sh diff --git a/automation/make_hists.py b/automation/make_hists.py index ee22ee9..3097580 100644 --- a/automation/make_hists.py +++ b/automation/make_hists.py @@ -9,11 +9,24 @@ for label, config in config_file.items(): - + #step 1 - find all files on tier 0 - fnames = [glob(f"{tier0}/{era}/{dataset}/NANOAOD/PromptReco-v*/*/*/*/*/*.root") - for era in config["eras"] for dataset in config["datasets"]] - fnames = [item for sublist in fnames for item in sublist] + # fnames = [glob(f"{tier0}/{era}/{dataset}/NANOAOD/PromptReco-v*/*/*/*/*/*.root") + # for era in config["eras"] for dataset in config["datasets"]] + # fnames = [item for sublist in fnames for item in sublist] + + #step 1 - find all prompt-reco files on DAS + fnames=[] + for dataset in config["datasets"]: + for era in config["eras"]: + # Call DAS to obtain file names + cmd='bash ../l1macros/getDASfiles.sh %s %s'%(dataset, era) + os.system(cmd) + + # Put them in a list + with open('files_das.txt', 'r') as file: + content = file.read() + fnames+=content.split() #step 2 - remove files that have already been processed for file in fnames: diff --git a/automation/submit.txt b/automation/submit.txt index 3f973fb..f367bd0 100644 --- a/automation/submit.txt +++ b/automation/submit.txt @@ -1,11 +1,21 @@ executable = wrapper.py -arguments = $(cmd) -output = logs/$(ClusterId).$(ProcId).out -error = logs/$(ClusterId).$(ProcId).err -log = logs/$(ClusterId).$(ProcId).log -+JobFlavour = espresso +JobBatchName = autoL1TDQMplotter +#output = logs/$(ClusterId).$(ProcId).out +#error = logs/$(ClusterId).$(ProcId).err +#log = logs/$(ClusterId).$(ProcId).log +output = /dev/null +error = /dev/null +log = /dev/null +#+JobFlavour = espresso ++MaxRuntime = 600 + +getenv = True +Proxy_path = /afs/cern.ch/user/p/pmeiring/x509up_u111185 +arguments = $(Proxy_path),$(cmd) + should_transfer_files = yes when_to_transfer_output = on_exit +transfer_input_files = wrapper.py queue cmd from queue.txt \ No newline at end of file diff --git a/automation/wrapper.py b/automation/wrapper.py index 67030d3..cb5bf5b 100755 --- a/automation/wrapper.py +++ b/automation/wrapper.py @@ -1,18 +1,19 @@ #!/bin/python3 +print("Wrapper started") import argparse import os +import sys -automation_path = os.path.dirname(os.path.abspath(__file__)) +automation_path = '/afs/cern.ch/work/p/pmeiring/private/CMS/l1tdpg/autoPlotter/MacrosNtuples/automation' -# parse commands to be executed as arguments -parser = argparse.ArgumentParser(description="wrapper running script on htcondor") -parser.add_argument('cmd', nargs='+', type=str, help='commands to be executed') -args = parser.parse_args() +# Set up proxy for accessing remote files with xrootd +os.environ["X509_USER_PROXY"] = sys.argv[1].split(",")[0] +print(os.environ["X509_USER_PROXY"]) -concatenated_cmd = ' 
'.join(args.cmd) +# Run command +concatenated_cmd = sys.argv[1].split(",")[1] concatenated_cmd = concatenated_cmd.replace("___", " ") concatenated_cmd = f'cd {automation_path}; ' + concatenated_cmd - print('command executed: ' + concatenated_cmd) os.system(concatenated_cmd) diff --git a/l1macros/getDASfiles.sh b/l1macros/getDASfiles.sh new file mode 100644 index 0000000..aa955da --- /dev/null +++ b/l1macros/getDASfiles.sh @@ -0,0 +1,28 @@ +# Parse arguments +DATASET=$1; shift +ERA=$1; shift + +# Check if a valid proxy exists (at least 1 hour remaining) +if voms-proxy-info --exists --valid 1:00 > /dev/null 2>&1; then + echo "Active proxy found with sufficient validity." +else + echo "No active proxy found or proxy expired. Activating..." + echo "dummypwd" | voms-proxy-init --voms cms + export X509_USER_PROXY=/afs/cern.ch/user/p/pmeiring/x509up_u111185 + echo "dummypwd" | voms-proxy-init --voms cms --valid 144:0 +fi + +# Query DAS to obtain a list of datasets +rm -f files_das.txt +cmd="/cvmfs/cms.cern.ch/common/dasgoclient -query='dataset=/${DATASET}/${ERA}-PromptReco-v*/NANOAOD'" +dgc_datasets=$(eval "$cmd") + +# Query again to obtain all files, and put them in a list +for dataset in $dgc_datasets; do + echo "Processing dataset: $dataset" + cmd="/cvmfs/cms.cern.ch/common/dasgoclient -query='file dataset=$dataset'" + # dgc_files=$(eval "$cmd"); echo $dgc_files >> files_das.txt + eval "$cmd" | while read -r file; do + echo "root://xrootd-cms.infn.it/$file" >> files_das.txt + done +done \ No newline at end of file
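For completeness, the getDASfiles.sh lookup can also be driven from Python via subprocess, which would avoid the intermediate files_das.txt. A rough sketch, assuming a valid grid proxy is already in place and the dasgoclient binary lives under /cvmfs (the query strings and xrootd redirector are copied from the script above; the helper name das_files is illustrative):

    import subprocess

    DASGOCLIENT = "/cvmfs/cms.cern.ch/common/dasgoclient"
    REDIRECTOR = "root://xrootd-cms.infn.it/"

    def das_files(dataset, era):
        """Return xrootd URLs for all PromptReco NANOAOD files of dataset/era."""
        query = f"dataset=/{dataset}/{era}-PromptReco-v*/NANOAOD"
        datasets = subprocess.run([DASGOCLIENT, f"-query={query}"],
                                  capture_output=True, text=True,
                                  check=True).stdout.split()
        files = []
        for ds in datasets:
            out = subprocess.run([DASGOCLIENT, f"-query=file dataset={ds}"],
                                 capture_output=True, text=True,
                                 check=True).stdout
            files += [REDIRECTOR + f for f in out.split()]
        return files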