40 changes: 21 additions & 19 deletions automation/config.yaml
@@ -1,24 +1,25 @@
JetMET:
datasets:
- 'JetMET0'
- 'JetMET1'
eras:
- 'Run2024*'
scripts:
- 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_DiJet.root -c DiJet'
plotting:
- 'python3 ../plotting/make_DiJet_plots.py --dir $OUTDIR --config ../config_cards/full_DiJet.yaml'
# JetMET:
# datasets:
# - 'JetMET0'
# - 'JetMET1'
# eras:
# - 'Run2024*'
# scripts:
# - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_DiJet.root -c DiJet'
# plotting:
# - 'python3 ../plotting/make_DiJet_plots.py --dir $OUTDIR --config ../config_cards/full_DiJet.yaml'

EGamma:
datasets:
- 'EGamma0'
- 'EGamma1'
eras:
- 'Run2024*'
- 'Run2024I'
- 'Run2025*'
scripts:
- 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_PhotonJet.root -c PhotonJet'
- 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToEE.root -c ZToEE'
- 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/oug_zee_dqmoff.root -c ZToEEDQMOff'
# - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/oug_zee_dqmoff.root -c ZToEEDQMOff'
plotting:
- 'python3 ../plotting/make_ZToEE_plots.py --dir $OUTDIR --config ../config_cards/full_ZToEE.yaml'
- 'python3 ../plotting/make_PhotonJet_plots.py --dir $OUTDIR --config ../config_cards/full_PhotonJet.yaml'
@@ -28,16 +29,17 @@ Muon:
- 'Muon0'
- 'Muon1'
eras:
- 'Run2024*'
- 'Run2024I'
- 'Run2025*'
scripts:
- 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToMuMu.root -c ZToMuMu'
# - 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToMuMu.root -c ZToMuMu'
- 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_MuonJet.root -c MuonJet' #TODO not working
- 'python3 ../l1macros/performances_nano.py -i $INFILE -o $OUTDIR/all_ZToTauTau.root -c ZToTauTau'
- 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_zmumu_dqmoffl.root -c ZToMuMuDQMOff'
- 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_jets_dqmoff.root -c JetsDQMOff'
- 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_ztautau_dqmoff.root -c ZToTauTauDQMOff'
- 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_etsum_dqmoff.root -c EtSumDQMOff'
# - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_zmumu_dqmoffl.root -c ZToMuMuDQMOff'
# - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_jets_dqmoff.root -c JetsDQMOff'
# - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_ztautau_dqmoff.root -c ZToTauTauDQMOff'
# - 'python3 ../l1macros/performances_nano_dqmoff.py -i $INFILE -o $OUTDIR/out_etsum_dqmoff.root -c EtSumDQMOff'
plotting:
- 'python3 ../plotting/make_ZToMuMu_plots.py --dir $OUTDIR --config ../config_cards/full_ZToMuMu.yaml'
# - 'python3 ../plotting/make_ZToMuMu_plots.py --dir $OUTDIR --config ../config_cards/full_ZToMuMu.yaml'
- 'python3 ../plotting/make_ZToTauTau_plots.py --dir $OUTDIR --config ../config_cards/full_ZToTauTau.yaml'
- 'python3 ../plotting/make_MuonJet_plots.py --dir $OUTDIR --config ../config_cards/full_MuonJet.yaml'
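For orientation: each top-level key in this file is one channel consumed by make_hists.py and make_plots.py (both modified below). A minimal sketch of how an entry expands, assuming the config is loaded with PyYAML (the loop variable names match make_hists.py; the loader itself is an assumption):

import yaml

with open('config.yaml') as f:
    config_file = yaml.safe_load(f)   # assumption: PyYAML load

for label, config in config_file.items():     # e.g. label == 'EGamma'
    for dataset in config['datasets']:         # e.g. 'EGamma0'
        for era in config['eras']:             # e.g. 'Run2024I'
            pass  # locate the input files, then run each entry in config['scripts']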
57 changes: 52 additions & 5 deletions automation/cron_job.sh
@@ -1,8 +1,55 @@
#!/bin/bash
# python3 make_hists.py
# python3 merge_per_run.py
# python3 merge_per_era.py
# python3 merge_total.py
# python3 make_plots.py

LOCKFILE="cron.lock"
LOGFILE="cron.log"

# Exit if lock file exists
if [ -e "$LOCKFILE" ]; then
# echo "Lock file exists: another job is running." $(date)
exit 1
fi

# Define a cleanup function in case of exit
function cleanup() {
rm -f "$LOCKFILE"
rm -f "$LOGFILE"
}

# Define a waiting function for condor jobs to finish
function waitForCondor() {
while true; do
out=$(condor_q 2>&1)
echo "$out" | grep -q "Failed to fetch ads" && { echo "[waitForCondor] condor_q failed, retrying..."; sleep 60; continue; }
echo "$out" | grep -q "$1" || { echo "[waitForCondor] Job $1 finished."; break; }
sleep 60
done
}

# Create lock file - cron_job is in running state
touch "$LOCKFILE"
trap cleanup EXIT

# Submit histomaker to condor and wait for jobs to finish
echo "Filling histograms..."
python3 make_hists.py --htcondor
condor_submit submit.txt | tee submitinfo
cluster=$(cat submitinfo | grep "submitted to cluster" | sed "s/.*cluster //; s/\.//")
waitForCondor $cluster

# Merge files locally
echo "Merging files..."
python3 merge_per_run.py
python3 -c 'from utils import generate_weekDict; generate_weekDict()'
cp week_runs.csv /eos/user/p/pmeiring/www/L1Trigger/l1dpg/DQM/Weekly/week_runs.txt
python3 merge_per_era.py
python3 merge_total.py

# Submit plotmaker to condor and wait for jobs to finish
echo "Producing plots..."
python3 make_plots.py --htcondor
condor_submit submit.txt | tee submitinfo
cluster=$(cat submitinfo | grep "submitted to cluster" | sed "s/.*cluster //; s/\.//")
waitForCondor $cluster

date
echo "All done!"
21 changes: 17 additions & 4 deletions automation/make_hists.py
@@ -9,11 +9,24 @@


for label, config in config_file.items():

#step 1 - find all files on tier 0
fnames = [glob(f"{tier0}/{era}/{dataset}/NANOAOD/PromptReco-v*/*/*/*/*/*.root")
for era in config["eras"] for dataset in config["datasets"]]
fnames = [item for sublist in fnames for item in sublist]
# fnames = [glob(f"{tier0}/{era}/{dataset}/NANOAOD/PromptReco-v*/*/*/*/*/*.root")
# for era in config["eras"] for dataset in config["datasets"]]
# fnames = [item for sublist in fnames for item in sublist]

#step 1 - find all prompt-reco files on DAS
fnames=[]
for dataset in config["datasets"]:
for era in config["eras"]:
# Call DAS to obtain file names
cmd='bash ../l1macros/getDASfiles.sh %s %s'%(dataset, era)
os.system(cmd)

# Put them in a list
with open('files_das.txt', 'r') as file:
content = file.read()
fnames+=content.split()

#step 2 - remove files that have already been processed
for file in fnames:
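getDASfiles.sh itself is not part of this diff; judging from the call site it writes the matching NanoAOD file names to files_das.txt. A sketch of what it presumably does, using the standard dasgoclient CLI (the exact query is an assumption):

#!/bin/bash
# $1 = dataset (e.g. Muon0), $2 = era pattern (e.g. Run2024*)
for d in $(dasgoclient -query="dataset=/$1/$2-PromptReco-v*/NANOAOD"); do
    dasgoclient -query="file dataset=$d"
done > files_das.txt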
6 changes: 4 additions & 2 deletions automation/make_plots.py
@@ -19,16 +19,18 @@
# abort plotting if all .png files are newer than all .root files
t_newest, t_oldest = 0, 0
root_files = glob(f"{merged_dir}/*.root")
png_files = glob(f"{merged_dir}/plotsL1Run3/*.png")
png_files = glob(f"{merged_dir}/*/*.png")
if len(root_files) > 0: t_newest = max(os.path.getctime(f) for f in root_files)
if len(png_files) > 0: t_oldest = min(os.path.getctime(f) for f in png_files)
if t_oldest > t_newest:
print('skipping: ' + merged_dir)
continue

for cmd in config["plotting"]:
plotdir="MuonJet" if "MuonJet" in cmd else "ZToTauTau" if "ZToTauTau" in cmd else "PhotonJet" if "PhotonJet" in cmd else "ZToEE"
print(80*"#"+'\n'+f"plotting for {merged_dir}")
os.makedirs(merged_dir + '/plotsL1Run3', exist_ok=True)
os.makedirs(merged_dir + '/' + plotdir, exist_ok=True)
os.system('cp /eos/user/p/pmeiring/www/L1Trigger/00_index.php %s/%s/index.php'%(merged_dir,plotdir))
cmd = cmd.replace("$OUTDIR", merged_dir)
print(cmd)
if htcondor: write_queue(cmd)
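The chained conditional that picks plotdir falls through to ZToEE; an equivalent lookup-based sketch (same order, same four directory names):

PLOT_DIRS = ('MuonJet', 'ZToTauTau', 'PhotonJet')
plotdir = next((p for p in PLOT_DIRS if p in cmd), 'ZToEE')

The copied 00_index.php then makes each per-channel plot directory browsable on the EOS web area.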
6 changes: 3 additions & 3 deletions automation/merge_per_era.py
@@ -1,14 +1,15 @@
#!/bin/python3

from glob import glob
from utils import hadd, get_weeks, htcondor_flag, dqm_prefix
from utils import hadd, get_weeks, get_weeks_v2, htcondor_flag, dqm_prefix

htcondor = htcondor_flag()

# collect all histogram root files merged by run
all_files = glob(f"{dqm_prefix}/*/*/*/*/*/merged/*.root") #change later to dqm_prefix

weeks = get_weeks()
# weeks = get_weeks()
weeks = get_weeks_v2()

# group files by week and era
file_groups = {}
@@ -33,7 +34,6 @@
file_groups[target] = []
file_groups[target].append(file)


# Hadd grouped files
for target, files in file_groups.items():
hadd(target, files, htcondor)
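hadd here is the helper from utils.py, which merges each group of per-run files into the single target path; by its name it presumably wraps ROOT's hadd utility, whose command line has the form (paths illustrative):

hadd -f <dqm_prefix>/<era>/<week>/merged/all_ZToMuMu.root run1.root run2.root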
20 changes: 15 additions & 5 deletions automation/submit.txt
@@ -1,11 +1,21 @@
executable = wrapper.py
arguments = $(cmd)
output = logs/$(ClusterId).$(ProcId).out
error = logs/$(ClusterId).$(ProcId).err
log = logs/$(ClusterId).$(ProcId).log
+JobFlavour = espresso
JobBatchName = autoL1TDQMplotter
#output = logs/$(ClusterId).$(ProcId).out
#error = logs/$(ClusterId).$(ProcId).err
#log = logs/$(ClusterId).$(ProcId).log
output = /dev/null
error = /dev/null
log = /dev/null
#+JobFlavour = espresso
+MaxRuntime = 600

getenv = True
Proxy_path = /afs/cern.ch/user/p/pmeiring/x509up_u111185
arguments = $(Proxy_path),$(cmd)


should_transfer_files = yes
when_to_transfer_output = on_exit
transfer_input_files = wrapper.py

queue cmd from queue.txt
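With arguments = $(Proxy_path),$(cmd), every job receives one comma-separated string: the proxy path first, then the command. Since literal spaces would split condor arguments, wrapper.py (below) decodes triple underscores back to spaces, so a queue.txt line presumably looks like (command illustrative):

python3___../plotting/make_ZToEE_plots.py___--dir___<merged_dir>___--config___../config_cards/full_ZToEE.yaml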
76 changes: 74 additions & 2 deletions automation/utils.py
@@ -1,8 +1,10 @@
import os, subprocess, argparse, uproot
import os, subprocess, argparse, uproot, re, csv
import pandas as pd
from datetime import datetime
from collections import defaultdict

#dqm_prefix = '/eos/cms/store/group/dpg_trigger/comm_trigger/L1Trigger/cmsl1dpg/www/DQM/T0PromptNanoMonit'
dqm_prefix = "/eos/user/l/lebeling/www/DQM"
dqm_prefix = "/eos/user/p/pmeiring/www/L1Trigger/l1dpg/DQM"
tier0 = "/eos/cms/tier0/store/data"


@@ -56,6 +58,76 @@ def get_weeks(year=2024):

return weeks

def get_weeks_v2(csv_path="run_weeks.csv"):
run_week_dict = {}
try:
with open(csv_path, newline='') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
try:
run = int(row["RunNumber"])
full_week = row["ISO_Week"]
# Extract just the week number (e.g., '2025-W22' -> '22')
week_only = full_week.split("-W")[1]
run_week_dict[run] = week_only
except (ValueError, KeyError, IndexError):
continue # Skip malformed rows
except FileNotFoundError:
print(f"File not found: {csv_path}")
except Exception as e:
print(f"Error reading CSV: {e}")

return run_week_dict

def generate_weekDict(year=2025):
run_week_dict = {}
week_run_dict = defaultdict(list)

for era in os.listdir(tier0):
if not era.startswith(f"Run{year}"):
continue

era_path = os.path.join(tier0, era, "L1Accept/RAW/v1/000/")
if not os.path.isdir(era_path):
continue

for root, dirs, files in os.walk(era_path):
if not root.endswith("/00000"):
continue

match = re.search(r"/(\d{3})/(\d{3})/00000$", root)
if not match:
continue

run_number = int(match.group(1) + match.group(2))

# Get parent directory of "00000", i.e., the run directory
run_dir = os.path.dirname(root)

try:
dir_stat = os.stat(run_dir)
# Use st_mtime (last modification time) or st_ctime (creation time on some systems)
timestamp = dir_stat.st_mtime
dt = datetime.fromtimestamp(timestamp)
iso_week = dt.strftime("%G-W%V")
run_week_dict[run_number] = iso_week
week_run_dict[iso_week].append(run_number)
except Exception:
continue # Skip if stats can't be read

with open("run_weeks.csv", "w", newline="") as f:
writer = csv.writer(f)
writer.writerow(["RunNumber", "ISO_Week"])
for run, week in sorted(run_week_dict.items()):
writer.writerow([run, week])

with open("week_runs.csv", "w", newline="") as f:
writer = csv.writer(f)
writer.writerow(["ISO_Week", "RunNumbers"])
for week, runs in sorted(week_run_dict.items()):
run_list_str = " ".join(str(r) for r in sorted(runs))
writer.writerow([week, run_list_str])


def hadd(target, files, htcondor = False):
os.makedirs(os.path.dirname(target), exist_ok=True)
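A minimal usage sketch of the two new helpers, grounded in the signatures above (run numbers and weeks illustrative): generate_weekDict walks the tier-0 tree once and writes both CSVs; get_weeks_v2 then reads the run-to-week map back as week-number strings:

from utils import generate_weekDict, get_weeks_v2

generate_weekDict(year=2025)            # writes run_weeks.csv and week_runs.csv
weeks = get_weeks_v2('run_weeks.csv')   # e.g. {392345: '22'} (illustrative)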
15 changes: 8 additions & 7 deletions automation/wrapper.py
@@ -1,18 +1,19 @@
#!/bin/python3
print("Wrapper started")

import argparse
import os
import sys

automation_path = os.path.dirname(os.path.abspath(__file__))
automation_path = '/afs/cern.ch/work/p/pmeiring/private/CMS/l1tdpg/autoPlotter/MacrosNtuples/automation'

# parse commands to be executed as arguments
parser = argparse.ArgumentParser(description="wrapper running script on htcondor")
parser.add_argument('cmd', nargs='+', type=str, help='commands to be executed')
args = parser.parse_args()
# Set up proxy for accessing remote files with xrootd
os.environ["X509_USER_PROXY"] = sys.argv[1].split(",")[0]
print(os.environ["X509_USER_PROXY"])

concatenated_cmd = ' '.join(args.cmd)
# Run command
concatenated_cmd = sys.argv[1].split(",")[1]
concatenated_cmd = concatenated_cmd.replace("___", " ")
concatenated_cmd = f'cd {automation_path}; ' + concatenated_cmd

print('command executed: ' + concatenated_cmd)
os.system(concatenated_cmd)
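With the argparse block gone, condor invokes the wrapper with a single positional argument built from submit.txt; an illustrative call (command hypothetical, proxy path as in submit.txt):

./wrapper.py /afs/cern.ch/user/p/pmeiring/x509up_u111185,python3___merge_total.py

The part before the comma becomes X509_USER_PROXY; the rest is the command with '___' decoded back to spaces. Note that split(',')[1] keeps only the first field after the proxy, so a command containing a comma would be silently truncated.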