Conversation
|
@cms-flaf-bot please test
|
|
pipeline#13922851 started |
|
pipeline#13922851 failed |
|
@cms-flaf-bot please test
|
|
pipeline#13923010 started |
|
pipeline#13923010 failed |
|
@cms-flaf-bot please test
|
|
pipeline#13930990 started |
|
pipeline#13930990 failed |
|
@cms-flaf-bot please test
|
|
pipeline#13931790 started |
|
pipeline#13931790 failed |
|
@cms-flaf-bot please test
|
|
pipeline#13933371 started |
|
pipeline#13933371 failed |
|
@kandrosov all tests except Hmumu 2024 passed. |
| final_array = ak.concatenate([final_array, new_array]) | ||
| elif save_as == "json": | ||
| if final_dict is None: | ||
| final_dict = {key: new_array[key] for key in new_array.keys()} |
There was a problem hiding this comment.
| final_dict = {key: new_array[key] for key in new_array.keys()} | |
| final_dict = new_array |
| for key in final_dict.keys(): | ||
| final_dict[key] += new_array[key] |
There was a problem hiding this comment.
| for key in final_dict.keys(): | |
| final_dict[key] += new_array[key] | |
| final_dict = producer.combine_outputs(final_dict, new_array) |
| n_orig = n_orig.GetValue() | ||
| n_final = n_final.GetValue() | ||
| if n_orig != n_final: | ||
| if save_as != "json" and n_orig != n_final: |
There was a problem hiding this comment.
| if save_as != "json" and n_orig != n_final: | |
| if save_as == "root" and n_orig != n_final: |
| # verbosity = ROOT.RLogScopedVerbosity( | ||
| # ROOT.Detail.RDF.RDFLogChannel(), ROOT.ELogLevel.kLogInfo | ||
| # ) | ||
| isData = dataset_name == "data" |
There was a problem hiding this comment.
| isData = dataset_name == "data" | |
| isData = setup.datasets[dataset_name]["process_group"] == "data" |
| parser.add_argument("--saveAs", type=str, default="root") | ||
| parser.add_argument("--isData", action="store_true") | ||
| parser.add_argument("--histTupleDef", type=str) |
There was a problem hiding this comment.
| parser.add_argument("--saveAs", type=str, default="root") | |
| parser.add_argument("--isData", action="store_true") | |
| parser.add_argument("--histTupleDef", type=str) |
Take everything from the setup
| isData = process_group == "data" | ||
|
|
There was a problem hiding this comment.
| isData = process_group == "data" |
| isData=isData, | ||
| btagIntegralRatios=btagIntegralRatios, |
There was a problem hiding this comment.
| isData=isData, | |
| btagIntegralRatios=btagIntegralRatios, |
| n_cpus = copy_param(HTCondorWorkflow.n_cpus, 4) | ||
|
|
||
| def workflow_requires(self): | ||
| correct_btagShape_weights = self.global_params.get("correct_btagShape_weights", False) |
There was a problem hiding this comment.
| correct_btagShape_weights = self.global_params.get("correct_btagShape_weights", False) |
| ps_call(cmd, verbose=1) | ||
|
|
||
|
|
||
| class BtagShapeWeightCorrectionTask(Task, HTCondorWorkflow, law.LocalWorkflow): |
There was a problem hiding this comment.
We don't want a separate task dedicated to btag shape weight corrections. We need AnalysisCacheTask and AnalysisCacheAggregatorTask (if the need_aggregator flag is set to true for the producer).
There was a problem hiding this comment.
do not modify RunKit in this PR
No description provided.