diff --git a/.github/integration_cfg.yaml b/.github/integration_cfg.yaml
index 076d661d..95787fbb 100644
--- a/.github/integration_cfg.yaml
+++ b/.github/integration_cfg.yaml
@@ -21,15 +21,16 @@ variables:
   H_mumu_version: "main"
   FLAF_version: "default"
   Corrections_version: "default"
+  StatInference_version: "default"
   HH_bbWW_active: "1"
   HH_bbtautau_active: "1"
   H_mumu_active: "1"
-  HH_bbWW_task: "FLAF.Analysis.tasks.HistPlotTask"
+  HH_bbWW_task: "StatInference.law.tasks.ResonantLimitsAndHistPlotTask"
   HH_bbtautau_task: "FLAF.Analysis.tasks.HistPlotTask"
   H_mumu_task: "FLAF.Analysis.tasks.HistPlotTask"
-  HH_bbWW_args: "--branches 0 --test 1000"
-  HH_bbtautau_args: "--branches 0 --test 1000"
-  H_mumu_args: "--branches 0 --test 1000"
+  HH_bbWW_args: "--test 1000"
+  HH_bbtautau_args: "--test 1000"
+  H_mumu_args: "--test 1000"
   HH_bbtautau_eras: "Run3_2022 Run3_2022EE Run3_2023 Run3_2023BPix"
   HH_bbWW_eras: "Run3_2022 Run3_2022EE Run3_2023 Run3_2023BPix"
   H_mumu_eras: "ALL"
diff --git a/Analysis/tasks.py b/Analysis/tasks.py
index 20609575..aff01635 100644
--- a/Analysis/tasks.py
+++ b/Analysis/tasks.py
@@ -686,12 +686,7 @@ def run(self):
         HaddMergedHistsProducer = os.path.join(
             self.ana_path(), "FLAF", "Analysis", "hadd_merged_hists.py"
         )
-        RenameHistsProducer = os.path.join(
-            self.ana_path(), "FLAF", "Analysis", "renameHists.py"
-        ) # this one is not used
 
-        input_dir = os.path.join("hists", self.version, self.period, var_name)
-        input_dir_remote = self.remote_dir_target(input_dir, fs=self.fs_HistTuple)
         all_datasets = []
         local_inputs = []
         with contextlib.ExitStack() as stack:
@@ -701,9 +696,6 @@
                 local_inputs.append(stack.enter_context(inp.localize("r")).path)
             dataset_names = ",".join(smpl for smpl in all_datasets)
             all_outputs_merged = []
-            outdir_histograms = os.path.join(
-                self.version, self.period, "merged", var_name, "tmp"
-            )
             if len(uncNames) == 1:
                 with self.output().localize("w") as outFile:
                     MergerProducer_cmd = [
@@ -727,66 +719,44 @@
                     MergerProducer_cmd.extend(local_inputs)
                     ps_call(MergerProducer_cmd, verbose=1)
             else:
+                job_home, remove_job_home = self.law_job_home()
                 for uncName in uncNames:
                     final_histname = f"{var_name}_{uncName}.root"
-                    tmp_outfile_merge = os.path.join(outdir_histograms, final_histname)
-                    tmp_outfile_merge_remote = self.remote_target(
-                        tmp_outfile_merge, fs=self.fs_histograms
-                    )
-                    with tmp_outfile_merge_remote.localize(
-                        "w"
-                    ) as tmp_outfile_merge_unc:
-                        MergerProducer_cmd = [
-                            "python3",
-                            MergerProducer,
-                            "--outFile",
-                            tmp_outfile_merge_unc.path,
-                            "--var",
-                            var_name,
-                            "--dataset_names",
-                            dataset_names,
-                            "--uncSource",
-                            uncName,
-                            "--channels",
-                            channels,
-                            "--period",
-                            self.period,
-                            "--LAWrunVersion",
-                            self.version,
-                        ]
-                        MergerProducer_cmd.extend(local_inputs)
-                        ps_call(MergerProducer_cmd, verbose=1)
-                    all_outputs_merged.append(tmp_outfile_merge)
-                if len(uncNames) > 1:
-                    all_uncertainties_string = ",".join(unc for unc in uncNames)
-                    tmp_outFile = self.remote_target(
-                        os.path.join(
-                            outdir_histograms, f"all_histograms_{var_name}_hadded.root"
-                        ),
-                        fs=self.fs_histograms,
-                    )
-                    with contextlib.ExitStack() as stack:
-                        local_merged_files = []
-                        for infile_merged in all_outputs_merged:
-                            tmp_outfile_merge_remote = self.remote_target(
-                                infile_merged, fs=self.fs_histograms
-                            )
-                            local_merged_files.append(
-                                stack.enter_context(
-                                    tmp_outfile_merge_remote.localize("r")
-                                ).path
-                            )
-                    with self.output().localize("w") as outFile:
-                        HaddMergedHistsProducer_cmd = [
-                            "python3",
-                            HaddMergedHistsProducer,
-                            "--outFile",
-                            outFile.path,
-                            "--var",
-                            var_name,
-                        ]
-                        HaddMergedHistsProducer_cmd.extend(local_merged_files)
-                        ps_call(HaddMergedHistsProducer_cmd, verbose=1)
+                    tmp_outfile_merge = os.path.join(job_home, final_histname)
+                    MergerProducer_cmd = [
+                        "python3",
+                        MergerProducer,
+                        "--outFile",
+                        tmp_outfile_merge,
+                        "--var",
+                        var_name,
+                        "--dataset_names",
+                        dataset_names,
+                        "--uncSource",
+                        uncName,
+                        "--channels",
+                        channels,
+                        "--period",
+                        self.period,
+                        "--LAWrunVersion",
+                        self.version,
+                    ]
+                    MergerProducer_cmd.extend(local_inputs)
+                    ps_call(MergerProducer_cmd, verbose=1)
+                    all_outputs_merged.append(tmp_outfile_merge)
+                with self.output().localize("w") as outFile:
+                    HaddMergedHistsProducer_cmd = [
+                        "python3",
+                        HaddMergedHistsProducer,
+                        "--outFile",
+                        outFile.path,
+                        "--var",
+                        var_name,
+                    ]
+                    HaddMergedHistsProducer_cmd.extend(all_outputs_merged)
+                    ps_call(HaddMergedHistsProducer_cmd, verbose=1)
+                if remove_job_home:
+                    shutil.rmtree(job_home)
 
 
 class AnalysisCacheTask(Task, HTCondorWorkflow, law.LocalWorkflow):
diff --git a/RunKit b/RunKit
index 096388f7..a6336545 160000
--- a/RunKit
+++ b/RunKit
@@ -1 +1 @@
-Subproject commit 096388f7a1cf581b5faf2b0bdf582e725ec8ac34
+Subproject commit a6336545a10527ade5d3c7bdd46ba8b8aeb01691
diff --git a/env.sh b/env.sh
index 91b4ccd7..e571d843 100644
--- a/env.sh
+++ b/env.sh
@@ -212,7 +212,7 @@ load_flaf_env() {
   export PATH="$FLAF_COMBINE_PATH/build/bin:$PATH"
   export LD_LIBRARY_PATH="$FLAF_COMBINE_PATH/build/lib:$LD_LIBRARY_PATH"
-  export PYTHONPATH="$FLAF_COMBINE_PATH/build/lib/python:$PYTHONPATH"
+  export PYTHONPATH="$FLAF_COMBINE_PATH/build/python:$PYTHONPATH"
   if [ -d "$HH_INFERENCE_PATH" ]; then
     install_inference "$env_file" $node_os $cmb_os $FLAF_COMBINE_VERSION
     export PYTHONPATH="$HH_INFERENCE_PATH:$PYTHONPATH"
 
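
For readers following the `Analysis/tasks.py` hunk: the old code wrote each per-uncertainty merge to remote storage via `localize("w")` and immediately pulled every file back with `localize("r")` before hadding, while the new code keeps the intermediates in the local law job home and hadds them directly, so the intermediates never leave the worker node. Below is a minimal sketch of that flow detached from the law task machinery; `merge_histograms`, `merger_script`/`hadd_script`, `tempfile.mkdtemp`, and `subprocess.run` are hypothetical stand-ins for the task method, the `MergerProducer`/`HaddMergedHistsProducer` paths built in the task, `self.law_job_home()`, and `ps_call`.

```python
# Sketch only: mirrors the control flow of the reworked else-branch above,
# not the actual FLAF task implementation.
import os
import shutil
import subprocess
import tempfile


def merge_histograms(merger_script, hadd_script, unc_names, var_name,
                     local_inputs, out_file):
    # Local scratch directory playing the role of self.law_job_home().
    job_home = tempfile.mkdtemp(prefix="hist_merge_")
    try:
        merged = []
        for unc_name in unc_names:
            # One merged ROOT file per uncertainty source, written to local
            # disk instead of being round-tripped through remote storage.
            tmp_out = os.path.join(job_home, f"{var_name}_{unc_name}.root")
            subprocess.run(
                ["python3", merger_script, "--outFile", tmp_out,
                 "--var", var_name, "--uncSource", unc_name, *local_inputs],
                check=True,
            )
            merged.append(tmp_out)
        # A single hadd step combines the per-uncertainty files into the
        # final task output.
        subprocess.run(
            ["python3", hadd_script, "--outFile", out_file,
             "--var", var_name, *merged],
            check=True,
        )
    finally:
        shutil.rmtree(job_home)  # mirrors the remove_job_home cleanup
```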