diff --git a/.gitignore b/.gitignore index 203765b3a..785d5d489 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,7 @@ Pipfile.lock *.run *.DS_Store .vscode/settings.json +*.save +*.tar.gz +cpptools/src/TGlauberMC/ +cpptools/src/TennGen/ diff --git a/cpptools/src/rutil/rutil.cxx b/cpptools/src/rutil/rutil.cxx index 1df9b6f59..ca1e86deb 100644 --- a/cpptools/src/rutil/rutil.cxx +++ b/cpptools/src/rutil/rutil.cxx @@ -26,7 +26,7 @@ namespace RUtil std::cout << val << " " << cell << std::endl; exit(0); } - + //--------------------------------------------------------------- // Rebin 2D histogram h with name hname using axes given by x_bins and y_bins // If move_y_underflow is set, y-underflow bins in h_to_rebin are added to (x, 1) bin @@ -88,7 +88,7 @@ namespace RUtil //--------------------------------------------------------------- - // Same function, but overloaded for TH2D + // Same function, but overloaded for TH2D //--------------------------------------------------------------- TH2D* HistUtils::rebin_th2(TH2D & h_to_rebin, char* hname, double* x_bins, int n_x_bins, double* y_bins, int n_y_bins, bool move_y_underflow /*= false*/) { @@ -207,6 +207,70 @@ namespace RUtil } // rebin_thn + //--------------------------------------------------------------- + // Rebin THn according to specified binnings; return pointer to rebinned THn + // Overloaded function that takes TH2 for prior variation + //--------------------------------------------------------------- + THnF* HistUtils::rebin_thn_th2prior( + const std::string & response_file_name, + const THnF* thn, + const std::string & name_thn_rebinned, + const std::string & name_roounfold, + const int & n_dim, + const int & n_pt_bins_det, + const double* det_pt_bin_array, + const int & n_obs_bins_det, + const double* det_bin_array, + const int & n_pt_bins_truth, + const double* truth_pt_bin_array, + const int & n_obs_bins_truth, + const double* truth_bin_array, + const std::string & label/*=""*/, + const double & 
prior_variation_parameter/*=0.*/, + const TH2* prior_variation/*=nullptr*/, + const bool move_underflow/*=false*/, + const bool use_miss_fake/*=false*/, + const bool do_roounfoldresponse/*=true*/) { + + // ------------------------------------------------------ + // Create empty THn with specified binnings + THnF* thn_rebinned = this->create_empty_thn(name_thn_rebinned.c_str(), n_dim, + n_pt_bins_det, det_pt_bin_array, + n_obs_bins_det, det_bin_array, + n_pt_bins_truth, truth_pt_bin_array, + n_obs_bins_truth, truth_bin_array); + + for (unsigned int i = 0; i < n_dim; i++) { + thn_rebinned->GetAxis(i)->SetTitle(thn->GetAxis(i)->GetTitle()); + } + + // ------------------------------------------------------ + // Create RooUnfoldResponse + RooUnfoldResponse* roounfold_response = ( + do_roounfoldresponse ? + this->create_empty_roounfoldresponse(thn_rebinned, name_roounfold, label) : nullptr); + + // Loop through THn and fill rebinned THn and RooUnfoldResponse + float min_det_pt = det_pt_bin_array[0]; + float min_truth_pt = truth_pt_bin_array[0]; + float min_det = det_bin_array[0]; + float min_truth = truth_bin_array[0]; + float max_det_pt = det_pt_bin_array[n_pt_bins_det]; + float max_truth_pt = truth_pt_bin_array[n_pt_bins_truth]; + float max_det = det_bin_array[n_obs_bins_det]; + float max_truth = truth_bin_array[n_obs_bins_truth]; + + this->fill_rebinned_thn(response_file_name, thn, thn_rebinned, n_dim, + do_roounfoldresponse, roounfold_response, + min_det_pt, min_truth_pt, min_det, min_truth, + max_det_pt, max_truth_pt, max_det, max_truth, + prior_variation, prior_variation_parameter, + move_underflow, use_miss_fake); + + return thn_rebinned; + + } // rebin_thn + //--------------------------------------------------------------- // Create an empty THn according to specified binnings //--------------------------------------------------------------- @@ -215,7 +279,7 @@ namespace RUtil const int & n_obs_bins_det, const double* det_bin_array, const int & n_pt_bins_truth, 
const double* truth_pt_bin_array, const int & n_obs_bins_truth, const double* truth_bin_array) { - + // Obviously only working for n_dim == 4 at the moment if (n_dim != 4) { std::cerr << "ERROR: Not Implemented: Assertion n_dim == 4 failed in " @@ -311,6 +375,165 @@ namespace RUtil const unsigned int n_bins_2 = thn->GetAxis(2)->GetNbins(); const unsigned int n_bins_3 = thn->GetAxis(3)->GetNbins(); + // Typically the n_dim = 4 setting is + // [pT_det, pT_truth, obs_det, obs_truth] + + // I don't find any global bin index implementation, so I manually loop through axes + int* global_bin = new int[n_dim]; + double* x = new double[n_dim]; + for (unsigned int bin_1 = 0; bin_1 < n_bins_1+2; bin_1++) { // pT-truth + global_bin[1] = bin_1; + x[1] = thn->GetAxis(1)->GetBinCenter(bin_1); + + // print helpful message while waiting + std::cout << bin_1 << " / " << n_bins_1 << '\r' << std::flush; + + /* // Sum of obs_true in this pT_true bin + thn->GetAxis(1)->SetRange(bin_1, bin_1+1); + TH2* proj = (TH2*) thn->Projection(3,1); + double base_content = proj->ProjectionY()->Integral(); + delete proj; + thn->GetAxis(1)->SetRange(0, 0); // unset range */ + + for (unsigned int bin_0 = 0; bin_0 < n_bins_0+2; bin_0++) { // pT-det + global_bin[0] = bin_0; + x[0] = thn->GetAxis(0)->GetBinCenter(bin_0); + + for (unsigned int bin_2 = 0; bin_2 < n_bins_2+2; bin_2++) { + global_bin[2] = bin_2; + x[2] = thn->GetAxis(2)->GetBinCenter(bin_2); + + for (unsigned int bin_3 = 0; bin_3 < n_bins_3+2; bin_3++) { + global_bin[3] = bin_3; + x[3] = thn->GetAxis(3)->GetBinCenter(bin_3); + + int bin = thn->GetBin(global_bin); + double content = thn->GetBinContent(bin); + if (content == 0) { continue; } + double error = thn->GetBinError(bin); + + // Impose a custom prior, if desired + if (std::abs(prior_variation_parameter) > 1e-5 && x[1] > 0 && x[3] > 0) { + + // Scale number of counts according to variation of pt & observable prior + int sign = prior_variation_parameter / 
std::abs(prior_variation_parameter); + double scale_factor = std::pow(x[1], sign * 0.5) * + (*prior_scale_f)(x[3], content /* / base_content*/, prior_variation_parameter); + + content *= scale_factor; + error *= scale_factor; + + } // scale prior + + // If underflow bin, and if move_underflow flag is activated, + // put the contents of the underflow bin into first bin of thn_rebinned + if (bin_2 == 0 || bin_3 == 0) { + if (move_underflow) { + if (bin_2 == 0) { + x[2] = thn_rebinned->GetAxis(2)->GetBinCenter(1); + } // bin_2 == 0 + if (bin_3 == 0) { + x[3] = thn_rebinned->GetAxis(3)->GetBinCenter(1); + std::string name(thn->GetName()); + if (name.find("matched") != std::string::npos) { + if (bin_2 == 0) { content = 1; } + else { content = 0; } + } + } // bin_3 == 0 + } + } // underflow bins + + // THn is filled as (x[0], x[1], x[2], x[3]) + // corresponding e.g. to (pt_det, pt_true, obs_det, obs_true) + bin = thn_rebinned->GetBin(x); + double prev_content = thn_rebinned->GetBinContent(bin); + double prev_error2 = thn_rebinned->GetBinError2(bin); + thn_rebinned->SetBinContent(bin, prev_content + content); + thn_rebinned->SetBinError(bin, std::sqrt(prev_error2 + std::pow(error, 2))); + + // RooUnfoldResponse should be filled as (x[0], x[2], x[1], x[3]) + // corresponding e.g. 
to (pt_det, obs_det, pt_true, obs_true) + if (do_roounfoldresponse) { + + bool pt_in_det_range = x[0] > min_det_pt && x[0] < max_det_pt; + bool obs_in_det_range = x[2] > min_det && x[2] < max_det; + bool pt_in_true_range = x[1] > min_truth_pt && x[1] < max_truth_pt; + bool obs_in_true_range = x[3] > min_truth && x[3] < max_truth; + + bool in_det_range = pt_in_det_range && obs_in_det_range; + bool in_true_range = pt_in_true_range && obs_in_true_range; + + // Fill if both det, true are in domain of RM + if (in_det_range and in_true_range) { + roounfold_response->FillContentError(x[0], x[2], x[1], x[3], content, error); + } + + if (use_miss_fake) { + + // If input is not in det-range (this is our usual kinematic efficiency correction), Miss + if (!in_det_range && in_true_range) { + roounfold_response->Miss(x[1], x[3], content); + } + // If truth-level is outside RM range (e.g. jet pt range is technically not [0,\infty]), Fake + // This is usually a negligible correction for us + else if (in_det_range && !in_true_range) { + roounfold_response->Fake(x[0], x[2], content); + } + } + } + } // bin_3 loop + } // bin_2 loop + } // bin_0 loop + } // bin_1 loop + + std::cout << "writing response..." 
<< std::endl; + TFile f(response_file_name.c_str(), "UPDATE"); + thn_rebinned->Write(); + if (do_roounfoldresponse) { roounfold_response->Write(); } + f.Close(); + std::cout << "done" << std::endl; + + // clean up memory + delete[] global_bin; + delete[] x; + + return; + + } // fill_rebinned_thn + + //--------------------------------------------------------------- + // Fill thn_rebinned with data from thn + // + // Don't include underflow/overflow by default + // If move_underflow = True, then fill underflow content of the observable + // (from original THn) into first bin (of rebinned THn) + // + // Overloaded definition with prior_variation a TH2 + void HistUtils::fill_rebinned_thn( + const std::string & response_file_name, const THnF* thn, THnF* thn_rebinned, + const unsigned int & n_dim, const bool do_roounfoldresponse/*=true*/, + RooUnfoldResponse* roounfold_response/*=nullptr*/, + const float min_det_pt, const float min_truth_pt, + const float min_det, const float min_truth, + const float max_det_pt, const float max_truth_pt, + const float max_det, const float max_truth, + const TH2* prior_variation/*=nullptr*/, const double & prior_variation_parameter/*=0.*/, + const bool move_underflow/*=false*/, const bool use_miss_fake/*=false*/) { + + // Only working for n_dim == 4 at the moment; generalizing to N dimensions + // will require some sort of recursive implementation + if (n_dim != 4) { + std::cerr << "ERROR: Not Implemented: Assertion n_dim == 4 failed in " + << "fjtools.cxx::fill_rebinned_thn()" << std::endl; + std::terminate(); + } + + // loop through all axes + const unsigned int n_bins_0 = thn->GetAxis(0)->GetNbins(); + const unsigned int n_bins_1 = thn->GetAxis(1)->GetNbins(); + const unsigned int n_bins_2 = thn->GetAxis(2)->GetNbins(); + const unsigned int n_bins_3 = thn->GetAxis(3)->GetNbins(); + // I don't find any global bin index implementation, so I manually loop through axes int* global_bin = new int[n_dim]; double* x = new double[n_dim]; @@ 
-339,11 +562,11 @@ namespace RUtil double error = thn->GetBinError(bin); // Impose a custom prior, if desired - if (std::abs(prior_variation_parameter) > 1e-5 && x[1] > 0 && x[3] > 0) { + if (prior_variation != nullptr) { // Scale number of counts according to variation of pt & observable prior double scale_factor = std::pow(x[1], prior_variation_parameter) * - (*prior_scale_f)(x[3], content, prior_variation_parameter); + prior_variation->GetBinContent(bin_3, bin_1); content *= scale_factor; error *= scale_factor; @@ -379,7 +602,7 @@ namespace RUtil // RooUnfoldResponse should be filled as (x[0], x[2], x[1], x[3]) // corresponding e.g. to (pt_det, obs_det, pt_true, obs_true) if (do_roounfoldresponse) { - + bool pt_in_det_range = x[0] > min_det_pt && x[0] < max_det_pt; bool obs_in_det_range = x[2] > min_det && x[2] < max_det; bool pt_in_true_range = x[1] > min_truth_pt && x[1] < max_truth_pt; @@ -387,12 +610,12 @@ namespace RUtil bool in_det_range = pt_in_det_range && obs_in_det_range; bool in_true_range = pt_in_true_range && obs_in_true_range; - + // Fill if both det, true are in domain of RM if (in_det_range and in_true_range) { roounfold_response->FillContentError(x[0], x[2], x[1], x[3], content, error); } - + if (use_miss_fake) { // If input is not in det-range (this is our usual kinematic efficiency correction), Miss @@ -423,7 +646,7 @@ namespace RUtil delete[] x; return; - + } // fill_rebinned_thn //--------------------------------------------------------------- @@ -446,6 +669,8 @@ namespace RUtil return prior_scale_func_3; case 4: return prior_scale_func_4; + case 5: + return prior_scale_func_5; default: return prior_scale_func_def; } @@ -469,7 +694,10 @@ namespace RUtil double prior_scale_func_2(const double & obs_true, const double & content, const double & prior_variation_parameter) { // sharpening/smoothing the distributions - return std::pow(content, 1 + prior_variation_parameter); + double factor = std::pow(content, prior_variation_parameter); + if 
(factor > 1.5) { return 1.5; } + else if (factor < 0.5) { return 0.5; } + return factor; } double prior_scale_func_3(const double & obs_true, const double & content, @@ -479,7 +707,7 @@ namespace RUtil double prior_scale_func_4(const double & obs_true, const double & content, const double & prior_variation_parameter) { - + // Ax+B, where A=slope, B=offset at z=0 // For 0.7 A = 1/dz, B = 1-(1-dz/2)*A float dz = 0.3; @@ -487,9 +715,17 @@ namespace RUtil return (A*obs_true + 1 - (1-dz/2.)*A); } + double prior_scale_func_5(const double & obs_true, const double & content, + const double & prior_variation_parameter) { + // linear scaling by +/- fraction between 0 and prior_var_param + double frac = 0.2; + return std::abs(1 + frac * (2 * obs_true / prior_variation_parameter - 1)); + } + double prior_scale_func_def(const double & obs_true, const double & content, const double & prior_variation_parameter) { - return obs_true; + // In default case, do not apply any scaling + return 1; } @@ -505,9 +741,242 @@ namespace RUtil return; } - // Create and return 2D histogram, convolving h with shape function - // ob & pT bins are identical in both old & new histograms - // obs & pTs are arrays of the central bin values + //--------------------------------------------------------------- + // Remove outliers from a TH1 via "simple" method: + // delete any bin contents with N counts < limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + TH1* HistUtils::simpleRemoveOutliers(TH1* hist, bool verbose, int limit) { + + if (verbose) { + std::cout << "Applying simple removal of outliers with counts < " + << limit << " for " << hist->GetName() << std::endl; + } + + for (int i = 1; i <= hist->GetNcells(); i++) { + if (hist->GetBinContent(i) < limit) { + hist->SetBinContent(i, 0); + hist->SetBinError(i, 0); + } + } + + return hist; + } + + //--------------------------------------------------------------- + // 
Remove outliers from a TH1 via pT-hat method: + // delete any bin contents with pT > limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + TH1* HistUtils::pThatRemoveOutliers(TH1* hist, bool verbose, const double & limit) { + + // First search for the pT bin to start the cut + int max_cell_number = -1; + // Assume that pT is on the x-axis + for (int i = 1; i <= hist->GetNbinsX(); i++) { + if (hist->GetXaxis()->GetBinLowEdge(i) > limit) { + max_cell_number = hist->GetBin(i, 0, 0); + break; + } + } + + // Apply cut if necessary + if (max_cell_number >= 0) { + if (verbose) { + std::cout << "Applying pT-hat removal of outliers for pT > " + << limit << " for " << hist->GetName() << std::endl; + } + + for (int i = max_cell_number; i <= hist->GetNcells(); i++) { + if (hist->GetBinContent(i)) { + hist->SetBinContent(i, 0); + hist->SetBinError(i, 0); + } + } + } + + return hist; + } + + //--------------------------------------------------------------- + // Remove outliers from a THn via pT-hat method: + // delete any bin contents with pT_truth > limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + THn* HistUtils::pThatRemoveOutliers(THn* hist, bool verbose, const double & limit, int dim, int pTdim) { + + // Safety check + if (dim != 4) { + std::cerr << "ERROR: THn of dim != 4 not yet implemented" << std::endl; + throw dim; + } + if (pTdim != 1) { + std::cerr << "ERROR: pTdim != 1 not yet implemented" << std::endl; + throw pTdim; + } + + // First search for the pT bin to start the cut + int max_bin_number = -1; + for (int i = 1; i <= hist->GetAxis(pTdim)->GetNbins(); i++) { + if (hist->GetAxis(pTdim)->GetBinLowEdge(i) > limit) { + max_bin_number = i; + break; + } + } + + if (max_bin_number >= 0) { + if (verbose) { + std::cout << "Applying pT-hat removal of outliers for pT > " + << limit << " for " << hist->GetName() << 
std::endl; + } + + for (int i = max_bin_number; i <= hist->GetAxis(pTdim)->GetNbins(); i++) { + for (int j = 0; j <= hist->GetAxis(0)->GetNbins(); j++) { + for (int k = 0; k <= hist->GetAxis(2)->GetNbins(); k++) { + for (int l = 0; l <= hist->GetAxis(3)->GetNbins(); l++) { + int datapoint[4] = {j, i, k, l}; + if (hist->GetBinContent(datapoint)) { + hist->SetBinContent(datapoint, 0); + hist->SetBinError(datapoint, 0); + } + } + } + } + } + } + + return hist; + } + + /* Recursive method -- unverified code! + //--------------------------------------------------------------- + // Remove outliers from a THn via pT-hat method: + // delete any bin contents with pT_truth > limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + THn* HistUtils::pThatRemoveOutliers(THn* hist, bool verbose, const double & limit, int dim, int pTdim) { + + // Safety check + if (pTdim > dim) { + std::cerr << "ERROR: cannot have pTdim = " << pTdim + << " for a THn of dim = " << dim << std::endl; + throw pTdim; + } + + // First search for the pT bin to start the cut + int max_bin_number = -1; + for (int i = 1; i <= hist->GetAxis(pTdim)->GetNbins(); i++) { + if (hist->GetAxis(pTdim)->GetBinLowEdge(i) > limit) { + max_bin_number = i; + break; + } + } + + if (max_bin_number >= 0) { + if (verbose) { + std::cout << "Applying pT-hat removal of outliers for pT > " + << limit << " for " << hist->GetName() << std::endl; + } + + int n_bins[dim] = { 0 }; + for (int d = 0; d < dim; d++) { + n_bins[d] = hist->GetAxis(d)->GetNbins(); + } + + // Recursively iterate through dimensions to remove extra counts + int x[dim] = { 0 }; + int dim_to_update = 0; + pThatRemoveOutliersTHn_recurse( + hist, limit, dim, pTdim, max_bin_number, n_bins, x, dim_to_update, verbose); + + } + + return hist; + } + + //--------------------------------------------------------------- + void HistUtils::pThatRemoveOutliersTHn_recurse( + THn* hist, int limit, int 
dim, int pTdim, int max_bin_number, + int* n_bins, int* x, int dim_to_update, bool verbose) { + + // In the deepest recursion case, empty the bin and return + if (dim_to_update == dim) { + if (hist->GetBinContent(x)) { + if (verbose) { + std::cout << "Erasing point " << x << " from " << hist->GetName() + << " with min pT-truth " << hist->GetAxis(pTdim)->GetBinLowEdge(x[pTdim]) + << std::endl; + } + hist->SetBinContent(x, 0); + hist->SetBinError(x, 0); + } + return; + } + + // Otherwise, go to next dimension + for (int i = 1; i <= n_bins[dim_to_update]; i++) { + if (dim_to_update == pTdim && i < max_bin_number) { + continue; + } + x[dim_to_update] = i; + simpleRemoveOutliersTHn_recurse(hist, limit, dim, n_bins, x, dim_to_update+1); + } + + return; + } + */ + + //--------------------------------------------------------------- + // Remove outliers from a THn via "simple" method: + // delete any bin contents with N counts < limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + THn* HistUtils::simpleRemoveOutliersTHn(THn* hist, bool verbose, int limit, int dim) { + + if (verbose) { + std::cout << "Applying simple removal of outliers with counts < " + << limit << " for " << hist->GetName() << std::endl; + } + + int n_bins[dim] = { 0 }; + for (int d = 0; d < dim; d++) { + n_bins[d] = hist->GetAxis(d)->GetNbins(); + } + + // Recursively iterate through dimensions to remove extra counts + int x[dim] = { 0 }; + int dim_to_update = 0; + simpleRemoveOutliersTHn_recurse(hist, limit, dim, n_bins, x, dim_to_update); + + return hist; + } + + //--------------------------------------------------------------- + void HistUtils::simpleRemoveOutliersTHn_recurse( + THn* hist, int limit, int dim, int* n_bins, int* x, int dim_to_update) { + + // In the deepest recursion case, check bin and return + if (dim_to_update == dim) { + if (hist->GetBinContent(x) < limit) { + hist->SetBinContent(x, 0); + 
hist->SetBinError(x, 0); + } + return; + } + + // Otherwise, go to next dimension + for (int i = 1; i <= n_bins[dim_to_update]; i++) { + x[dim_to_update] = i; + simpleRemoveOutliersTHn_recurse(hist, limit, dim, n_bins, x, dim_to_update+1); + } + + return; + } + + //--------------------------------------------------------------- + // Create and return 2D histogram, convolving h with shape function + // ob & pT bins are identical in both old & new histograms + // obs & pTs are arrays of the central bin values TH2D* HistUtils::convolve_F_np(const double & Omega, const double & R, const double & beta, const double* ob_bins, const int & n_ob_bins, const double* obs, const double* ob_bin_width, diff --git a/cpptools/src/rutil/rutil.hh b/cpptools/src/rutil/rutil.hh index 417d428a2..c5b289f0f 100644 --- a/cpptools/src/rutil/rutil.hh +++ b/cpptools/src/rutil/rutil.hh @@ -19,7 +19,7 @@ namespace RUtil virtual ~Test() {;} void setMember(Double_t v) {fMember = v;} Double_t getMember() {return fMember;} - + private: Double_t fMember; @@ -41,13 +41,15 @@ namespace RUtil const double & prior_variation_parameter); double prior_scale_func_4(const double & obs_true, const double & content, const double & prior_variation_parameter); + double prior_scale_func_5(const double & obs_true, const double & content, + const double & prior_variation_parameter); double prior_scale_func_def(const double & obs_true, const double & content, const double & prior_variation_parameter); - void delete_h(TH2* h); - void delete_h(THn* h); + void delete_h(TH2* h); + void delete_h(THn* h); - bool* sorted_match(const int* a, const int a_len, const int* b, const int b_len); + bool* sorted_match(const int* a, const int a_len, const int* b, const int b_len); //------------------------------------------------------ // Rebinning utilities @@ -57,7 +59,7 @@ namespace RUtil HistUtils() : TObject() {;} virtual ~HistUtils() {;} - + // Rebin 2D histogram h with name hname using axes given by x_bins and y_bins TH2F* 
rebin_th2(TH2F & h_to_rebin, char* hname, double* x_bins, int n_x_bins, double* y_bins, int n_y_bins, bool move_y_underflow = false); @@ -88,19 +90,71 @@ namespace RUtil const bool use_miss_fake=false, const bool do_roounfoldresponse=true); - //------------------------------------------------------ - // Convolution of nonperturbative shape functions - - // Create and return 2D histogram, convolving h with shape function - TH2D* convolve_F_np(const double & Omega, const double & R, const double & beta, - const double* ob_bins, const int & n_ob_bins, const double* obs, - const double* ob_bin_width, - const double* pT_bins, const int & n_pT_bins, const double* pTs, - const TH2D & h, const std::string & name, const bool groomed = false, - const double & sd_beta = 0, const double & sd_zcut = 0.2, - const std::string & option = ""); - - double find_cell(double val, const double * cell, const int range, bool phi); + // Rebin N-dimensional THn to a new histogram with name name_thn_rebinned using provided axes + // WARNING: currently requires n_dim = 4 + // Alternate definition to take TH2 as prior variation + THnF* rebin_thn_th2prior( + const std::string & response_file_name, + const THnF* thn, + const std::string & name_thn_rebinned, + const std::string & name_roounfold, + const int & n_dim, + const int & n_pt_bins_det, + const double* det_pt_bin_array, + const int & n_obs_bins_det, + const double* det_bin_array, + const int & n_pt_bins_truth, + const double* truth_pt_bin_array, + const int & n_obs_bins_truth, + const double* truth_bin_array, + const std::string & label="", + const double & prior_variation_parameter=0., + const TH2* prior_variation=nullptr, + const bool move_underflow=false, + const bool use_miss_fake=false, + const bool do_roounfoldresponse=true); + + //--------------------------------------------------------------- + // Remove outliers from a TH1 via "simple" method: + // delete any bin contents with N counts < limit + // Modifies histogram in-place and 
returns its pointer + //--------------------------------------------------------------- + TH1* simpleRemoveOutliers(TH1* hist, bool verbose, int limit); + + //--------------------------------------------------------------- + // Remove outliers from a TH1 via pT-hat method: + // delete any bin contents with pT > limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + TH1* pThatRemoveOutliers(TH1* hist, bool verbose, const double & limit); + + //--------------------------------------------------------------- + // Remove outliers from a THn via "simple" method: + // delete any bin contents with N counts < limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + THn* simpleRemoveOutliersTHn(THn* hist, bool verbose, int limit, int dim); + + //--------------------------------------------------------------- + // Remove outliers from a THn via pT-hat method: + // delete any bin contents with pT_truth > limit + // Modifies histogram in-place and returns its pointer + //--------------------------------------------------------------- + THn* pThatRemoveOutliers(THn* hist, bool verbose, const double & limit, int dim, int pTdim); + + //------------------------------------------------------ + // Convolution of nonperturbative shape functions + + // Create and return 2D histogram, convolving h with shape function + TH2D* convolve_F_np(const double & Omega, const double & R, const double & beta, + const double* ob_bins, const int & n_ob_bins, const double* obs, + const double* ob_bin_width, + const double* pT_bins, const int & n_pT_bins, const double* pTs, + const TH2D & h, const std::string & name, const bool groomed = false, + const double & sd_beta = 0, const double & sd_zcut = 0.2, + const std::string & option = ""); + + double find_cell(double val, const double * cell, const int range, bool phi); private: // Create empty THn 
using provided axes @@ -131,18 +185,45 @@ namespace RUtil const bool move_underflow=false, const bool use_miss_fake=false); + // Fill empty thn_rebinned with data from thn + // Overloaded function to take TH2 as prior variation + void fill_rebinned_thn(const std::string & response_file_name, const THnF* thn, + THnF* thn_rebinned, const unsigned int & n_dim, + const bool do_roounfoldresponse=true, + RooUnfoldResponse* roounfold_response=nullptr, + const float min_det_pt=0., + const float min_truth_pt=0., + const float min_det=0., + const float min_truth=0., + const float max_det_pt=0., + const float max_truth_pt=0., + const float max_det=0., + const float max_truth=0., + const TH2* prior_variation=nullptr, + const double & prior_variation_parameter=0., + const bool move_underflow=false, + const bool use_miss_fake=false); + // Set scaling of prior prior_scale_func prior_scale_factor_obs(const int & option); - //------------------------------------------------------ - // Convolution of nonperturbative shape functions + // Recursive helper function for simpleRemoveOutliersTHn() + void simpleRemoveOutliersTHn_recurse( + THn* hist, int limit, int dim, int* n_bins, int* x, int dim_to_update); + // Recursive helper function for pThatRemoveOutliersTHn() + //void pThatRemoveOutliersTHn_recurse( + // THn* hist, int limit, int dim, int pTdim, int max_bin_number, + // int* n_bins, int* x, int dim_to_update, bool verbose); + + //------------------------------------------------------ + // Convolution of nonperturbative shape functions // Non-perturbative parameter with factored-out beta dependence // Omega is Omega_{a=0} == Omega_{beta=2} (universal[?]) - inline double Omega_beta(const double & Omega, const double & beta); + inline double Omega_beta(const double & Omega, const double & beta); - // Shape function for convolving nonperturbative effects - inline double F_np(const double & Omega, const double & k, const double & beta); + // Shape function for convolving 
nonperturbative effects + inline double F_np(const double & Omega, const double & k, const double & beta); ClassDef(HistUtils, 1) }; diff --git a/pyjetty/alice_analysis/analysis/user/ang/PbPb/pT_cut_comp.cxx b/pyjetty/alice_analysis/analysis/user/ang/PbPb/pT_cut_comp.cxx new file mode 100644 index 000000000..85c5958e8 --- /dev/null +++ b/pyjetty/alice_analysis/analysis/user/ang/PbPb/pT_cut_comp.cxx @@ -0,0 +1,66 @@ +// ROOT Macro for comparing ROOT THn before and after pT-hat based cut +// Written by Ezra Lesser, August 2022 +#include <string> +#include "THn.h" +#include "TH2.h" + +int pT_cut_comp() { + + // Open up trimmed and untrimmed ROOT files + std::string base_dir = "/rstorage/alice/AnalysisResults/ang/877553/"; + std::string untrimmed_dir = base_dir + "Scaled_no_cut/"; + std::string trimmed_dir = base_dir + "pt_trimmed_piecewise/"; + std::string file_name = "AnalysisResults.root"; + + // Jet parameters + std::string obs = "ang"; + std::string jetR = "0.2"; // "0.4"; + std::string Rmax = "0.1"; // "0.25"; + //std::vector<std::string> alphas = {"1", "1.5", "2", "3"}; + std::vector<std::string> alphas = {"1"}; + + for (int pThatBin = 1; pThatBin <= 20; pThatBin++) { // loop over pT-hat bins + // Open ROOT files + TFile* file_untrimmed = new TFile((untrimmed_dir + std::to_string(pThatBin) + '/' + file_name).c_str(), "r"); + TFile* file_trimmed = new TFile((trimmed_dir + std::to_string(pThatBin) + '/' + file_name).c_str(), "r"); + + for (const std::string & alpha : alphas) { + // Retrieve RM for making projections + std::string RM_name = + "hResponse_JetPt_" + obs + "_R" + jetR + '_' + alpha + "_Rmax" + Rmax + "Scaled"; + THn* RM_untrimmed = (THn*) file_untrimmed->Get(RM_name.c_str()); + RM_untrimmed->SetNameTitle("untrimmed", "untrimmed"); + THn* RM_trimmed = (THn*) file_trimmed->Get(RM_name.c_str()); + + // Project out the x-y (pT) distributions + TH2D* pT_untrimmed = RM_untrimmed->Projection(1, 0); + TH2D* pT_trimmed = RM_trimmed->Projection(1, 0); + + // Create ratio trimmed/untrimmed and
write to file + TH2D* pT_ratio = (TH2D*) pT_trimmed->Clone((RM_name + "_ratio").c_str()); + pT_ratio->Divide(pT_untrimmed); + + const char canvas_name[] = "pT ratio trimmed/untrimmed"; + TCanvas c(canvas_name, canvas_name, 700, 600); + c.Draw(); + + const char pad_name[] = "the pad"; + TPad pad(pad_name, pad_name, 0, 0, 1, 1); + pad.SetTicks(1, 1); + pad.SetLeftMargin(0.1); + pad.SetRightMargin(0.05); + pad.Draw(); + + pT_ratio->Draw("colz"); + gStyle->SetOptStat(0); + c.SaveAs((RM_name + "_ratio_pThat" + std::to_string(pThatBin) + ".pdf").c_str()); + delete pT_untrimmed; delete pT_trimmed; delete pT_ratio; + } + + file_untrimmed->Close(); + file_trimmed->Close(); + delete file_untrimmed; delete file_trimmed; + } + + return 0; +} diff --git a/pyjetty/alice_analysis/analysis/user/ang/PbPb/plotting_utils_ang.py b/pyjetty/alice_analysis/analysis/user/ang/PbPb/plotting_utils_ang.py new file mode 100755 index 000000000..74a0d744c --- /dev/null +++ b/pyjetty/alice_analysis/analysis/user/ang/PbPb/plotting_utils_ang.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +""" + Plotting utilities for jet substructure analysis with track dataframe.
+ + Author: James Mulligan (james.mulligan@berkeley.edu) +""" + +from __future__ import print_function + +# General +import os +import sys +import math +import yaml + +# Data analysis and plotting +import numpy as np +from array import * +import ROOT + +# Base class +from pyjetty.alice_analysis.analysis.user.james import plotting_utils_base + +################################################################ +class PlottingUtils(plotting_utils_base.PlottingUtilsBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, output_dir = '.', config_file = '', R_max = None, thermal = False, groomer_studies = False, **kwargs): + super(PlottingUtils, self).__init__(output_dir, config_file, R_max, thermal, groomer_studies, **kwargs) + + print(self) + diff --git a/pyjetty/alice_analysis/analysis/user/ang/PbPb/run_analysis_ang.py b/pyjetty/alice_analysis/analysis/user/ang/PbPb/run_analysis_ang.py new file mode 100755 index 000000000..6e05ed6b6 --- /dev/null +++ b/pyjetty/alice_analysis/analysis/user/ang/PbPb/run_analysis_ang.py @@ -0,0 +1,2641 @@ +#! 
""" run_analysis_ang.py +Loads theory comparisons, performs un/folding, makes plots +Ezra Lesser, 2020 (elesser@berkeley.edu) +"""
= \ + self.obs_config_dict['common_settings']['plot_overlay_list'] + + self.jet_matching_distance = config['jet_matching_distance'] + + self.is_pp = True + self.results_pp = None + if 'constituent_subtractor' in config: + self.is_pp = False + self.results_pp = config["results_pp"] if "results_pp" in config else None + self.max_distance = config["constituent_subtractor"]["max_distance"] + print('is_pp: {}'.format(self.is_pp)) + + self.theory_predictions = config["theory_predictions"] if \ + "theory_predictions" in config else [] + self.theory_predictions_names = config["theory_predictions_names"] if \ + "theory_predictions_names" in config else [] + + # Whether or not to use the previous measurement in ratio + self.use_prev_result = config["use_prev_result"] + + self.histutils = ROOT.RUtil.HistUtils() + + self.colors = [ROOT.kRed+1, ROOT.kGreen+2, ROOT.kBlue, ROOT.kOrange+2, + ROOT.kViolet+2, ROOT.kCyan+1, ROOT.kPink+10, ROOT.kGray+1, + ROOT.kYellow+4, ROOT.kAzure+8, ROOT.kRed] + #self.colors = self.ColorArray + + #--------------------------------------------------------------- + # This function is called once for each subconfiguration + #--------------------------------------------------------------- + def plot_single_result(self, jetR, obs_label, obs_setting, grooming_setting): + #print('Plotting each individual result...') + + # Plot final result for each 1D substructure distribution (with PYTHIA) + self.plot_final_result(jetR, obs_label, obs_setting, grooming_setting, draw_ratio=True) + + + #--------------------------------------------------------------- + # This function is called once after all subconfigurations have been looped over, for each R + #--------------------------------------------------------------- + def plot_all_results(self, jetR): + + print('Plotting overlay of all results...') + + for i_config, overlay_list in enumerate(self.plot_overlay_list): + + #if len(overlay_list) > 1: + + self.plot_final_result_overlay(i_config, jetR, overlay_list) + + 
self.plotting_utils = plotting_utils_ang.PlottingUtils( + self.output_dir_performance, self.config_file)
self.create_output_subdir(output_dir_performance, 'residual_obs') + self.create_output_subdir(output_dir_performance, 'mc_projections_det') + self.create_output_subdir(output_dir_performance, 'mc_projections_truth') + self.create_output_subdir(output_dir_performance, 'truth') + self.create_output_subdir(output_dir_performance, 'data') + if not self.is_pp: + self.create_output_subdir(output_dir_performance, 'delta_pt') + + # Generate performance plots + for jetR in self.jetR_list: + + # Plot some subobservable-independent performance plots + self.plotting_utils.plot_DeltaR(jetR, self.jet_matching_distance) + self.plotting_utils.plot_JES(jetR) + self.plotting_utils.plot_JES_proj(jetR, self.pt_bins_reported) + self.plotting_utils.plotJER( + jetR, self.utils.obs_label(self.obs_settings[0], self.grooming_settings[0])) + self.plotting_utils.plot_jet_reco_efficiency( + jetR, self.utils.obs_label(self.obs_settings[0], self.grooming_settings[0])) + + if not self.is_pp: + self.plotting_utils.plot_delta_pt(jetR, self.pt_bins_reported) + + # Plot subobservable-dependent performance plots + for i, _ in enumerate(self.obs_subconfig_list): + + obs_setting = self.obs_settings[i] + grooming_setting = self.grooming_settings[i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + self.plotting_utils.plot_obs_resolution( + jetR, obs_label, self.xtitle, self.pt_bins_reported) + self.plotting_utils.plot_obs_residual_pt( + jetR, obs_label, self.xtitle, self.pt_bins_reported) + self.plotting_utils.plot_obs_residual_obs(jetR, obs_label, self.xtitle) + self.plotting_utils.plot_obs_projections( + jetR, obs_label, obs_setting, grooming_setting, self.xtitle, self.pt_bins_reported) + self.plotting_utils.plot_obs_truth( + jetR, obs_label, obs_setting, grooming_setting, self.xtitle, self.pt_bins_reported) + + return + + + #---------------------------------------------------------------------- + def plot_final_result(self, jetR, obs_label, obs_setting, grooming_setting, 
draw_ratio=False): + print('Plot final results for {}: R = {}, {}'.format(self.observable, jetR, obs_label)) + + self.utils.set_plotting_options() + ROOT.gROOT.ForceStyle() + + # Loop through pt slices, and plot final result for each 1D distribution + for i in range(0, len(self.pt_bins_reported) - 1): + min_pt_truth = self.pt_bins_reported[i] + max_pt_truth = self.pt_bins_reported[i+1] + maxbin = self.obs_max_bins(obs_label)[i] + + # Do special plotting for comparison to Run 1 case + if self.use_prev_result: + self.plot_observable( + jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, + max_pt_truth, maxbin, plot_MC=False, draw_ratio=True) + return + + self.plot_observable( + jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, + max_pt_truth, maxbin, plot_MC=True, draw_ratio=True) + + if self.results_pp: + self.plot_observable( + jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, + max_pt_truth, maxbin, plot_pp_data=True, plot_MC=True, draw_ratio=True) + + self.plot_observable( + jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, + maxbin, plot_pp_data=True, plot_MC=True, plot_PbPb=False, draw_ratio=True) + + #---------------------------------------------------------------------- + def plot_observable( + self, jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, + maxbin, plot_pp_data=False, plot_MC=False, plot_PbPb=True, draw_ratio=False): + + self.set_logy = True if (grooming_setting and self.observable == "ang") else False + make_ratio_plot = True if (plot_MC or plot_pp_data and draw_ratio) else False + match_data_normalization = True + plot_pythia_and_herwig = (not plot_PbPb) + ignore_MC_top_panel = (plot_pp_data and draw_ratio and plot_PbPb) + + name = 'cResult_R{}_{}_{}-{}'.format(jetR, obs_label, min_pt_truth, max_pt_truth) + c = ROOT.TCanvas(name, name, 600, 900 if make_ratio_plot else 450) + c.Draw() + + c.cd() + pad_y_split = 0.55 + if self.observable == "mass" and min_pt_truth 
== 80: + pad_left_margin = 0.18 + else: + pad_left_margin = 0.18 #0.16 if self.set_logy else 0.15 + myPad = ROOT.TPad('myPad', 'The pad', 0, pad_y_split if make_ratio_plot else 0, 1, 1) + myPad.SetLeftMargin(pad_left_margin) + myPad.SetTopMargin(0.03) + myPad.SetRightMargin(0.04) + myPad.SetBottomMargin(0 if make_ratio_plot else 0.18) + if self.set_logy: + myPad.SetLogy() + myPad.SetTicks(1, 1) + myPad.Draw() + myPad.cd() + + color = 1 # black for data + + # Get histograms + name = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + if grooming_setting: + fraction_tagged = getattr(self, 'tagging_fraction_R{}_{}_{}-{}'.format( + jetR, obs_label, min_pt_truth, max_pt_truth)) + #fraction_tagged = getattr(self, '{}_fraction_tagged'.format(name)) + # maxbin+1 in grooming case to account for extra tagging bin + if grooming_setting and maxbin: + h = self.truncate_hist(getattr(self, name), None, maxbin+1, (name+'_trunc').replace("__","_")) + else: + h = self.truncate_hist(getattr(self, name), None, maxbin, (name+'_trunc').replace("__", "_")) + h.SetMarkerSize(1.5) + h.SetMarkerStyle(20) + h.SetMarkerColor(color) + h.SetLineStyle(1) + h.SetLineWidth(2) + h.SetLineColor(color) + + h_sys = getattr(self, 'hResult_{}_systotal_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)) + h_sys.SetLineColor(0) + h_sys.SetFillColor(color) + h_sys.SetFillColorAlpha(color, 0.3) + h_sys.SetFillStyle(1001) + h_sys.SetLineWidth(0) + + n_obs_bins_truth = self.n_bins_truth(obs_label) + truth_bin_array = self.truth_bin_array(obs_label) + if maxbin: + truth_bin_array = truth_bin_array[0:maxbin+1] + n_obs_bins_truth = len(truth_bin_array)-1 + myBlankHisto = ROOT.TH1F('myBlankHisto','Blank Histogram', n_obs_bins_truth, truth_bin_array) + myBlankHisto.SetNdivisions(505) + alpha = obs_label.split("_")[0] + alpha_string = str(alpha)+",g" if grooming_setting else str(alpha) + if not make_ratio_plot: + if self.observable == 
"ang": + myBlankHisto.SetXTitle(self.xtitle.replace("#it{#alpha}}^{#it{#kappa}=1}", alpha_string+"}")) #.replace("alpha}", "alpha}="+alpha)) + elif self.observable == "mass": + if grooming_setting: + myBlankHisto.SetXTitle(self.xtitle.replace("{m}_{jet}", "{m}_{jet,g}")) + else: + myBlankHisto.SetXTitle(self.xtitle) + myBlankHisto.GetXaxis().SetTitleOffset(1.02) + myBlankHisto.GetXaxis().SetTitleSize(0.06) + if self.observable == "ang": + ytit = self.ytitle.replace("#it{#alpha}}^{#it{#kappa}=1}", alpha_string+"}") #.replace("alpha}", "alpha}="+alpha)) + if grooming_setting: + myBlankHisto.SetYTitle(ytit.replace("{#sigma}_{jet}", "{#sigma}_{inc}")) + else: + myBlankHisto.SetYTitle(ytit.replace("{#sigma}_{jet}", "{#sigma}")) + elif self.observable == "mass": + if grooming_setting: + myBlankHisto.SetYTitle(self.ytitle.replace("{m}_{jet}", "{m}_{jet,g}").replace("{#sigma}_{jet}", "{#sigma}_{inc}")) + else: + myBlankHisto.SetYTitle(self.ytitle.replace("{#sigma}_{jet}", "{#sigma}")) + if self.observable == "mass" and min_pt_truth == 80: + myBlankHisto.GetYaxis().SetTitleOffset(1.35) + else: + myBlankHisto.GetYaxis().SetTitleOffset(1) #1.3 if self.set_logy else 1) + myBlankHisto.GetYaxis().SetTitleSize(0.07) + + plot_pythia = False; plot_herwig = False; + plot_jewel_no_recoils = False; plot_jewel_recoils = False; plot_jewel_pp = False + plot_jetscape = False; plot_jetscape_pp = False; + plot_hybrid = False; plot_hybrid_pp = False; + plot_zhang = False; plot_zhang_pp = False; + n_pp_models = 0; n_AA_models = 0 + if plot_MC: + + hPythia = None; fraction_tagged_pythia = None; + hHerwig = None; fraction_tagged_herwig = None; + hJewel_pp = None; fraction_tagged_jewel_pp = None; + hJetscape_pp = None; fraction_tagged_jetscape_pp = None; + hJetscape = None; fraction_tagged_jetscape = None; + hZhang_pp = None; fraction_tagged_zhang_pp = None; + hZhang = None; fraction_tagged_zhang = None; + maxbin_adj = maxbin + 1 if (maxbin != None and grooming_setting) else maxbin + + if 
plot_pp_data: + if plot_pythia_and_herwig: + hPythia, fraction_tagged_pythia = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'Pythia') + hHerwig, fraction_tagged_herwig = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'Herwig') + #hJewel_pp, fraction_tagged_jewel_pp = self.MC_prediction( + # jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'JEWEL pp') + hJetscape_pp, fraction_tagged_jetscape_pp = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'JETSCAPE pp') + hZhang_pp, fraction_tagged_zhang_pp = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'Zhang pp') + + hJewel_no_recoils, fraction_tagged_jewel_no_recoils, \ + hJewel_recoils, fraction_tagged_jewel_recoils = None, None, None, None + #hJewel_no_recoils, fraction_tagged_jewel_no_recoils = self.MC_prediction( + # jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'JEWEL', recoils=False) + #hJewel_recoils, fraction_tagged_jewel_recoils = self.MC_prediction( + # jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'JEWEL', recoils=True) + hJetscape, fraction_tagged_jetscape = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'JETSCAPE') + hZhang, fraction_tagged_zhang = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj, 'Zhang') + + hHybridNoElastic_pp, hHybridWithElastic_pp, hHybridNoElastic, hHybridWithElastic = self.get_hybrid( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin_adj) + + if hPythia: + color_pythia = self.colors[0] + # Create clone with 0 error for solid line + hPythia_draw = hPythia.Clone(hPythia.GetName()+"_drawclone") + for i in range(hPythia_draw.GetNbinsX()+1): + hPythia_draw.SetBinError(i, 0) + #hPythia_draw.SetMarkerSize(1.5) + #hPythia_draw.SetMarkerStyle(21) + 
#hPythia_draw.SetMarkerColor(600-6) + #hPythia_draw.SetFillColorAlpha(color_pythia, 0.5) + #hPythia_draw.SetFillStyle(1001) + hPythia_draw.SetLineStyle(1) + hPythia_draw.SetLineColor(color_pythia) + hPythia_draw.SetLineWidth(4) + plot_pythia = True + n_pp_models += 1 + #else: + # print('No PYTHIA prediction for %s %s' % (self.observable, obs_label)) + + if hHerwig: + color_herwig = self.colors[1] + # Create clone with 0 error for solid line + hHerwig_draw = hHerwig.Clone(hHerwig.GetName()+"_drawclone") + for i in range(hHerwig_draw.GetNbinsX()+1): + hHerwig_draw.SetBinError(i, 0) + #hHerwig_draw.SetMarkerSize(1.5) + #hHerwig_draw.SetMarkerStyle(33) + #hHerwig_draw.SetMarkerColor(2) + #hHerwig_draw.SetFillColorAlpha(color_herwig, 0.5) + #hHerwig_draw.SetFillStyle(1001) + hHerwig_draw.SetLineStyle(7) + hHerwig_draw.SetLineColor(color_herwig) + hHerwig_draw.SetLineWidth(4) + plot_herwig = True + n_pp_models += 1 + #else: + # print('No Herwig prediction for %s %s' % (self.observable, obs_label)) + + if hJewel_pp: + # Create clone with 0 error for solid line + hJewel_pp_draw = hJewel_pp.Clone(hJewel_pp.GetName()+"_drawclone") + for i in range(hJewel_pp_draw.GetNbinsX()+1): + hJewel_pp_draw.SetBinError(i, 0) + color_jewel_pp = self.colors[2] + #hJewel_pp_draw.SetMarkerSize(1.5) + #hJewel_pp_draw.SetMarkerStyle(28) + #hJewel_pp_draw.SetMarkerColor(8) + #hJewel_pp_draw.SetFillColorAlpha(color_jewel_pp, 0.5) + #hJewel_pp_draw.SetFillStyle(1001) + hJewel_pp_draw.SetLineStyle(8) + hJewel_pp_draw.SetLineColor(color_jewel_pp) + hJewel_pp_draw.SetLineWidth(4) + plot_jewel_pp = True + n_pp_models += 1 + #else: + # print('No JEWEL pp prediction for %s %s' % (self.observable, obs_label)) + + if hJetscape_pp: + # Check that binning is the same as data + b_d = [h.GetBinLowEdge(i) for i in range(1, h.GetNbinsX()+2)] + b_j = [hJetscape_pp.GetBinLowEdge(i) for i in range(1, hJetscape_pp.GetNbinsX()+2)] + if b_j != b_d: + print("JETSCAPE pp BINS ARE DIFFERENT for %s!\n" % obs_label, + 
"*** data: ", b_d, "\n*** jtsp: ", b_j, sep="") + else: + color_jetscape = self.colors[3] + #hJetscape_pp.SetMarkerSize(1.5) + #hJetscape_pp.SetMarkerStyle(42) + #hJetscape_pp.SetMarkerColor(44) + hJetscape_pp.SetFillColorAlpha(color_jetscape, 0.7) + hJetscape_pp.SetFillStyle(1001) + hJetscape_pp.SetLineStyle(1) + hJetscape_pp.SetLineColor(color_jetscape) + hJetscape_pp.SetLineWidth(0) + n_pp_models += 1 + plot_jetscape_pp = True + #else: + # print('No JETSCAPE prediction for %s %s' % (self.observable, obs_label)) + + if hZhang_pp: + # Set 0 error for solid line + for i in range(hZhang_pp.GetNbinsX()+1): + hZhang_pp.SetBinError(i, 0) + color_zhang = self.colors[6] + #hZhang_pp.SetMarkerSize(1.5) + #hZhang_pp.SetMarkerStyle(27) + #hZhang_pp.SetMarkerColor(95) + #hZhang_pp.SetFillColorAlpha(color_zhang, 0.7) + #hZhang_pp.SetFillStyle(1001) + hZhang_pp.SetLineStyle(3) + hZhang_pp.SetLineColor(color_zhang) + hZhang_pp.SetLineWidth(4) + n_pp_models += 1 + plot_zhang_pp = True + + if hJewel_no_recoils: + color_jewel_no_recoils = self.colors[0] + # Create clone with 0 error for solid line + hJewel_no_recoils_draw = hJewel_no_recoils.Clone(hJewel_no_recoils.GetName()+"_drawclone") + for i in range(hJewel_no_recoils_draw.GetNbinsX()+1): + hJewel_no_recoils_draw.SetBinError(i, 0) #hJewel_no_recoils.SetMarkerSize(1.5) + #hJewel_no_recoils.SetMarkerStyle(34) + #hJewel_no_recoils.SetMarkerColor(42) + #hJewel_no_recoils.SetFillColorAlpha(color_jewel_no_recoils, 0.5) + #hJewel_no_recoils.SetFillStyle(1001) + hJewel_no_recoils_draw.SetLineStyle(1) + hJewel_no_recoils_draw.SetLineColor(color_jewel_no_recoils) + hJewel_no_recoils_draw.SetLineWidth(4) + plot_jewel_no_recoils = True + n_AA_models += 1 + #else: + # print('No JEWEL (recoils off) prediction for %s %s' % (self.observable, obs_label)) + + if hJewel_recoils: + color_jewel_recoils = self.colors[1] + # Create clone with 0 error for solid line + hJewel_recoils_draw = hJewel_recoils.Clone(hJewel_recoils.GetName()+"_drawclone") 
+ for i in range(hJewel_recoils_draw.GetNbinsX()+1): + hJewel_recoils_draw.SetBinError(i, 0) #hJewel_no_recoils.SetMarkerSize(1.5) + #hJewel_recoils.SetMarkerSize(1.5) + #hJewel_recoils.SetMarkerStyle(47) + #hJewel_recoils.SetMarkerColor(4) + #hJewel_recoils.SetFillColorAlpha(color_jewel_recoils, 0.5) + #hJewel_recoils.SetFillStyle(1001) + hJewel_recoils_draw.SetLineStyle(7) + hJewel_recoils_draw.SetLineColor(color_jewel_recoils) + hJewel_recoils_draw.SetLineWidth(4) + plot_jewel_recoils = True + n_AA_models += 1 + #else: + # print('No JEWEL (recoils on) prediction for %s %s' % (self.observable, obs_label)) + + if hJetscape: + # Check that binning is the same as data + b_d = [h.GetBinLowEdge(i) for i in range(1, h.GetNbinsX()+2)] + b_j = [hJetscape.GetBinLowEdge(i) for i in range(1, hJetscape.GetNbinsX()+2)] + if b_j != b_d: + print("JETSCAPE AA BINS ARE DIFFERENT for %s!\n" % obs_label, + "*** data: ", b_d, "*** jtsp: ", b_j, sep="") + else: + color_jetscape = self.colors[3] + #hJetscape.SetMarkerSize(1.5) + #hJetscape.SetMarkerStyle(43) + #hJetscape.SetMarkerColor(6) + hJetscape.SetFillColorAlpha(color_jetscape, 0.7) + hJetscape.SetFillStyle(1001) + hJetscape.SetLineStyle(1) + hJetscape.SetLineColor(color_jetscape) + hJetscape.SetLineWidth(0) + plot_jetscape = True + n_AA_models += 1 + #else: + # print('No JETSCAPE prediction for %s %s' % (self.observable, obs_label)) + + if hZhang: + # Set 0 error for solid line + for i in range(hZhang.GetNbinsX()+1): + hZhang.SetBinError(i, 0) + color_zhang = self.colors[6] + #hZhang.SetMarkerSize(1.5) + #hZhang.SetMarkerStyle(33) + #hZhang.SetMarkerColor(2) + #hZhang.SetFillColorAlpha(color_zhang, 0.7) + #hZhang.SetFillStyle(1001) + hZhang.SetLineStyle(3) + hZhang.SetLineColor(color_zhang) + hZhang.SetLineWidth(4) + n_AA_models += 1 + plot_zhang = True + + if hHybridNoElastic: + plot_hybrid = True + color_hybrid_no_elastic = self.colors[4] + color_hybrid_elastic = self.colors[5] + + #hHybridNoElastic.SetMarkerSize(1.5) + 
#hHybridNoElastic.SetMarkerStyle(35) + #hHybridNoElastic.SetMarkerColor(7) + hHybridNoElastic.SetFillColorAlpha(color_hybrid_no_elastic, 0.5) + hHybridNoElastic.SetFillStyle(1001) + hHybridNoElastic.SetLineStyle(1) + hHybridNoElastic.SetLineColor(color_hybrid_no_elastic) + hHybridNoElastic.SetLineWidth(0) + + #hHybridWithElastic.SetMarkerSize(1.5) + #hHybridWithElastic.SetMarkerStyle(36) + #hHybridWithElastic.SetMarkerColor(8) + hHybridWithElastic.SetFillColorAlpha(color_hybrid_elastic, 0.5) + hHybridWithElastic.SetFillStyle(1001) + hHybridWithElastic.SetLineStyle(1) + hHybridWithElastic.SetLineColor(color_hybrid_elastic) + hHybridWithElastic.SetLineWidth(0) + + n_AA_models += 2 + + if hHybridNoElastic_pp and plot_pp_data: + plot_hybrid_pp = True + color_hybrid_pp = color_hybrid_no_elastic + + #hHybridNoElastic_pp.SetMarkerSize(1.5) + #hHybridNoElastic_pp.SetMarkerStyle(35) + #hHybridNoElastic_pp.SetMarkerColor(7) + hHybridNoElastic_pp.SetFillColorAlpha(color_hybrid_pp, 0.5) + hHybridNoElastic_pp.SetFillStyle(1001) + hHybridNoElastic_pp.SetLineStyle(8) + hHybridNoElastic_pp.SetLineColor(color_hybrid_pp) + hHybridNoElastic_pp.SetLineWidth(0) + + #hHybridWithElastic_pp.SetMarkerSize(1.5) + #hHybridWithElastic_pp.SetMarkerStyle(36) + #hHybridWithElastic_pp.SetMarkerColor(8) + hHybridWithElastic_pp.SetFillColorAlpha(color_hybrid_pp, 0.5) + hHybridWithElastic_pp.SetFillStyle(1001) + hHybridWithElastic_pp.SetLineStyle(8) + hHybridWithElastic_pp.SetLineColor(color_hybrid_pp) + hHybridWithElastic_pp.SetLineWidth(0) + + n_pp_models += 2 + + h_pp_data = None; h_pp_sys = None + if plot_pp_data: + h_pp_data, h_pp_sys = self.get_pp_data( + jetR, obs_label, min_pt_truth, max_pt_truth, + [h.GetBinLowEdge(i) for i in range(1, h.GetNbinsX()+2)]) + + h_pp_data.SetMarkerSize(1.5) + h_pp_data.SetMarkerStyle(4) #27) + h_pp_data.SetMarkerColor(1) + h_pp_data.SetFillColor(1) + h_pp_data.SetLineStyle(9) + h_pp_data.SetLineWidth(2) + h_pp_data.SetLineColor(1) + h_pp_sys.SetLineColor(0) + 
h_pp_sys.SetLineWidth(0) + #h_pp_sys.SetFillColor(1) + h_pp_sys.SetFillColorAlpha(color, 0.8) + h_pp_sys.SetFillStyle(3004) + + if self.observable != "mass" and not grooming_setting: + maxval = max(1.9*h.GetBinContent(int(0.4*h.GetNbinsX())), 1.7*h.GetMaximum()) + if plot_jetscape: + maxval = max(maxval, 1.9*hJetscape.GetMaximum()) + # Extra adjustment for some bins + alpha = obs_label.split("_")[0] + if min_pt_truth == 100 and alpha != "1": + maxval *= 1.2 + elif min_pt_truth == 80 and alpha == "3": + maxval *= 1.2 + else: + maxval = 2.1*max(h.GetBinContent(int(0.4*h.GetNbinsX())), h.GetBinContent(2), h.GetBinContent(3)) + if grooming_setting: + maxval *= 1.1 + ymin = 1e-3 # Prevent ROOT from drawing 0 on plots + if self.observable == "mass": + ymin = -0.015 + elif min_pt_truth == 40 and obs_label == "3": + ymin = -0.6 + if self.set_logy: + maxval *= 5 if (not plot_PbPb) else (1e1 if plot_pp_data else 5) + ymin = 5e-1 * h.GetMinimum() + if (ymin - 0.1) < 0 and abs(ymin - 0.1) < 0.015: # prevent y-axis label from being cut off + ymin -= 0.015 + myBlankHisto.SetMinimum(ymin) + myBlankHisto.SetMaximum(maxval) + myBlankHisto.Draw("E") + + if match_data_normalization: + m = 2 if grooming_setting else 1 + integral = h.Integral(m, h.GetNbinsX(), "width") + if plot_jewel_no_recoils: + hJewel_no_recoils_draw.Scale(integral / hJewel_no_recoils.Integral(m, h.GetNbinsX(), "width")) + hJewel_no_recoils.Scale(integral / hJewel_no_recoils.Integral(m, h.GetNbinsX(), "width")) + if plot_jewel_recoils: + hJewel_recoils_draw.Scale(integral / hJewel_recoils.Integral(m, h.GetNbinsX(), "width")) + hJewel_recoils.Scale(integral / hJewel_recoils.Integral(m, h.GetNbinsX(), "width")) + if plot_jetscape: + hJetscape.Scale(integral / hJetscape.Integral(m, h.GetNbinsX(), "width")) + if plot_zhang: + hZhang.Scale(integral / hZhang.Integral(m, h.GetNbinsX(), "width")) + if plot_hybrid: + hHybridNoElastic.Scale(integral / hHybridNoElastic.Integral(m, h.GetNbinsX(), "width")) + 
hHybridWithElastic.Scale(integral / hHybridWithElastic.Integral(m, h.GetNbinsX(), "width")) + + if plot_pp_data: + integral = h_pp_data.Integral(m, h_pp_data.GetNbinsX(), "width") + if plot_pythia: + hPythia_draw.Scale(integral / hPythia.Integral(m, h.GetNbinsX(), "width")) + hPythia.Scale(integral / hPythia.Integral(m, h.GetNbinsX(), "width")) + if plot_herwig: + hHerwig_draw.Scale(integral / hHerwig.Integral(m, h.GetNbinsX(), "width")) + hHerwig.Scale(integral / hHerwig.Integral(m, h.GetNbinsX(), "width")) + if plot_jewel_pp: + hJewel_pp_draw.Scale(integral / hJewel_pp.Integral(m, h.GetNbinsX(), "width")) + hJewel_pp.Scale(integral / hJewel_pp.Integral(m, h.GetNbinsX(), "width")) + if plot_jetscape_pp: + hJetscape_pp.Scale(integral / hJetscape_pp.Integral(m, h.GetNbinsX(), "width")) + if plot_zhang_pp: + hZhang_pp.Scale(integral / hZhang_pp.Integral(m, h.GetNbinsX(), "width")) + if plot_hybrid: + hHybridNoElastic_pp.Scale(integral / hHybridNoElastic_pp.Integral(m, h.GetNbinsX(), "width")) + hHybridWithElastic_pp.Scale(integral / hHybridWithElastic_pp.Integral(m, h.GetNbinsX(), "width")) + + if not ignore_MC_top_panel: + if not plot_PbPb: + if plot_pythia: + hPythia_draw.Draw('L 3 same') + if plot_herwig: + hHerwig_draw.Draw('L 3 same') + if plot_jewel_pp: + hJewel_pp_draw.Draw('L 3 same') + if plot_zhang_pp: + hZhang_pp.Draw('L 3 same') + if plot_hybrid_pp: + hHybridNoElastic_pp.Draw('E3 same') + #hHybridWithElastic_pp.Draw('E2 same') + if plot_jetscape_pp: + hJetscape_pp.Draw('E3 same') + else: + if plot_jewel_no_recoils: + hJewel_no_recoils_draw.Draw('L 3 same') + if plot_jewel_recoils: + hJewel_recoils_draw.Draw('L 3 same') + if plot_zhang: + hZhang.Draw('L 3 same') + if plot_hybrid: + hHybridNoElastic.Draw('E3 same') + hHybridWithElastic.Draw('E3 same') + if plot_jetscape: + hJetscape.Draw('E3 same') + + if plot_pp_data: + h_pp_sys.Draw("E2 same") + h_pp_data.Draw("PE X0 same") + + if plot_PbPb: + h_sys.Draw("E2 same") + h.Draw("PE X0 same") + + text_latex = 
ROOT.TLatex() + text_latex.SetNDC() + #text_xval = 0.53 if plot_PbPb else 0.56 + text_xval = 0.59 + text_yval = 0.9; delta_y = 0.075 + if self.observable == "ang" and plot_pp_data and not plot_PbPb: + text = 'ALICE' + else: + text = 'ALICE {}'.format(self.figure_approval_status) + text_latex.SetTextSize(0.055) + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + text_latex.SetTextSize(0.055) + if plot_PbPb: + if not plot_pp_data: + text = '0#minus10% centrality Pb#minusPb' + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + text = '#sqrt{#it{s}_{NN}} = 5.02 TeV' + text_latex.DrawLatex(text_xval, text_yval, text) + else: + text = "pp #sqrt{#it{s}} = 5.02 TeV" + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + text = "Ch.-particle anti-#it{k}_{T} jets" + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + #text_xval += 0.1 if plot_PbPb else 0.07 + + text = str(min_pt_truth) + ' < #it{p}_{T}^{ch jet} < ' + str(max_pt_truth) + ' GeV/#it{c}' + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + text = '| #it{#eta}_{jet}| < %s' % str(0.9 - jetR) + subobs_label = self.utils.formatted_subobs_label(self.observable) + #if subobs_label: + # text += ', %s = %s' % (subobs_label, obs_setting) + text += ', #it{R} = ' + str(jetR) + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + if grooming_setting: + text = self.utils.formatted_grooming_label(grooming_setting) #.replace("#beta}", "#beta}_{SD}") + text_latex.DrawLatex(text_xval, text_yval - 0.005, text) + text_yval -= delta_y + 0.005 + + if not match_data_normalization: + text_latex.SetTextSize(0.055) + text = ['#it{f}_{tagged}^{data} = %3.3f' % fraction_tagged] + if plot_pythia: + text.append('#it{f}_{tagged}^{PYTHIA} = %3.3f' % fraction_tagged_pythia) + if plot_herwig: + text.append('#it{f}_{tagged}^{Herwig} = %3.3f' % fraction_tagged_herwig) + if plot_jewel_pp: + 
text.append('#it{f}_{tagged}^{JEWEL pp} = %3.3f' % fraction_tagged_jewel_pp) + if plot_jewel_no_recoils: + text.append('#it{f}_{tagged}^{JEWEL} = %3.3f' % fraction_tagged_jewel_no_recoils) + if plot_jewel_recoils: + text.append('#it{f}_{tagged}^{JEWEL rec.} = %3.3f' % fraction_tagged_jewel_recoils) + if plot_jetscape: + text.append('#it{f}_{tagged}^{JETSCAPE} = %3.3f' % fraction_tagged_jetscape) + if plot_jetscape: + text.append('#it{f}_{tagged}^{JETSCAPE pp} = %3.3f' % fraction_tagged_jetscape_pp) + + # Print two of each f_tagged on a line + for list_i, sublist in enumerate([text[i:i+2] for i in range(0, len(text), 2)]): + line = ", ".join(sublist) + text_latex.DrawLatex(text_xval, text_yval, line) + text_yval -= delta_y + + maxy = 0.94 + #miny = maxy - 0.07 * n_AA_models if plot_PbPb else maxy - 0.07 * n_pp_models + miny = 0.65 if (plot_PbPb and plot_pp_data) else 0.75 + myLegend = ROOT.TLegend(0.2, miny, text_xval-0.02, maxy) + self.utils.setup_legend(myLegend, 0.054) + if plot_PbPb: + if plot_pp_data: + myLegend.AddEntry(h, '0#minus10% Pb#minusPb data', 'pe') + else: + myLegend.AddEntry(h, 'Pb#minusPb data', 'pe') + myLegend.AddEntry(h_sys, 'Pb#minusPb syst. uncert.', 'f') + if plot_pp_data: + myLegend.AddEntry(h_pp_data, 'pp data', 'pe') + myLegend.AddEntry(h_pp_sys, 'pp syst. 
uncert.', 'f') + myLegend.Draw() + + ########################################################################## + # Make ratio plot if desired + + if make_ratio_plot: + c.cd() + pad2 = ROOT.TPad('pad2', 'Ratio pad', 0, 0, 1, pad_y_split) + pad2.SetLeftMargin(pad_left_margin) + pad2.SetTopMargin(0) + pad2.SetRightMargin(0.04) + pad2.SetBottomMargin(0.18) + #if self.set_logy: + # pad2.SetLogy() + pad2.SetTicks(1, 1) + pad2.Draw() + pad2.cd() + + myBlankHisto2 = ROOT.TH1F('myBlankHisto2','Blank Histogram for Ratio', n_obs_bins_truth, truth_bin_array) + myBlankHisto2.SetNdivisions(505) + if self.observable == "ang": + alpha_string = str(alpha)+",g" if grooming_setting else str(alpha) + myBlankHisto2.SetXTitle(self.xtitle.replace("#it{#alpha}}^{#it{#kappa}=1}", alpha_string+"}")) #.replace("alpha}", "alpha}="+alpha)) + elif self.observable == "mass": + if grooming_setting: + myBlankHisto2.SetXTitle(self.xtitle.replace("{m}_{jet}", "{m}_{jet,g}")) + else: + myBlankHisto2.SetXTitle(self.xtitle) + myBlankHisto2.GetXaxis().SetTitleOffset(1.02) + myBlankHisto2.GetXaxis().SetTitleSize(0.06) + if plot_pp_data and plot_PbPb: + myBlankHisto2.SetYTitle("#frac{Pb#minusPb}{pp}") + elif plot_MC: + myBlankHisto2.SetYTitle("#frac{Theory}{Data}") + myBlankHisto2.GetYaxis().SetTitleOffset(1.3 if self.set_logy else 1.25) + myBlankHisto2.GetYaxis().SetTitleSize(0.055) + if plot_pp_data and plot_PbPb: + myBlankHisto2.SetMinimum(0.5) + if self.observable == "ang": + myBlankHisto2.SetMaximum(1.95) + else: + myBlankHisto2.SetMaximum(1.95) + elif plot_MC: + #if min_pt_truth == 100: + # myBlankHisto2.SetMinimum(0.35) + #else: + myBlankHisto2.SetMinimum(0.5) + if plot_pp_data: + myBlankHisto2.SetMaximum(1.95) + else: + #if min_pt_truth == 100: + # myBlankHisto2.SetMaximum(1.65) + #else: + myBlankHisto2.SetMaximum(1.95) + myBlankHisto2.Draw("E") + + # Draw dashed line at ratio = 1 + line = ROOT.TLine(0, 1, h.GetBinLowEdge(h.GetNbinsX()+1), 1) + line.SetLineColor(920+2) + line.SetLineStyle(2) + 
line.Draw() + + # pp MC / pp data + if not plot_PbPb: + h_pp_data_no_error = h_pp_data.Clone(h_pp_data.GetName()+"_no_error") + for i in range(1, h_pp_data.GetNbinsX()+1): + h_pp_data_no_error.SetBinError(i, 0) + + if plot_pythia: + hPythiaRatio = hPythia.Clone(hPythia.GetName()+"_ratio") + hPythiaRatio.Divide(h_pp_data_no_error) + hPythiaRatio.SetMarkerSize(0) + hPythiaRatio.SetFillColorAlpha(color_pythia, 1) + hPythiaRatio.SetFillStyle(1001) + hPythiaRatio.SetLineStyle(1) + hPythiaRatio.SetLineColor(color_pythia) + hPythiaRatio.SetLineWidth(4) + hPythiaRatio.Draw('E3 same') + + if plot_herwig: + hHerwigRatio = hHerwig.Clone(hHerwig.GetName()+"_ratio") + hHerwigRatio.Divide(h_pp_data_no_error) + hHerwigRatio.SetMarkerSize(0) + hHerwigRatio.SetFillColorAlpha(color_herwig, 0.5) + hHerwigRatio.SetFillStyle(1001) + hHerwigRatio.SetLineStyle(1) + hHerwigRatio.SetLineColor(color_herwig) + hHerwigRatio.SetLineWidth(4) + hHerwigRatio.Draw('E3 same') + + if plot_jewel_pp: + hJewelRatio = hJewel_pp.Clone(hJewel_pp.GetName()+"_ratio") + hJewelRatio.Divide(h_pp_data_no_error) + hJewelRatio.SetMarkerSize(0) + hJewelRatio.SetFillColorAlpha(color_jewel_pp, 0.5) + hJewelRatio.SetFillStyle(1001) + hJewelRatio.SetLineStyle(1) + hJewelRatio.SetLineColor(color_jewel_pp) + hJewelRatio.SetLineWidth(4) + hJewelRatio.Draw('E3 same') + + if plot_hybrid_pp: + hHybridNoElasticRatio = hHybridNoElastic_pp.Clone(hHybridNoElastic_pp.GetName()+"_ratio") + hHybridNoElasticRatio.Divide(h_pp_data_no_error) + hHybridNoElasticRatio.SetMarkerSize(0) + hHybridNoElasticRatio.SetFillColorAlpha(color_hybrid_pp, 0.5) + hHybridNoElasticRatio.SetFillStyle(1001) + hHybridNoElasticRatio.SetLineStyle(1) + hHybridNoElasticRatio.SetLineColor(color_hybrid_pp) + hHybridNoElasticRatio.SetLineWidth(4) + hHybridNoElasticRatio.Draw('E3 same') + + #hHybridWithElasticRatio = hHybridWithElastic_pp.Clone(hHybridWithElastic_pp.GetName()+"_ratio") + #hHybridWithElasticRatio.Divide(h_pp_data) + 
#hHybridWithElasticRatio.Draw('E2 same') + + if plot_jetscape_pp: + hJetscapeRatio = hJetscape_pp.Clone(hJetscape_pp.GetName()+"_ratio") + hJetscapeRatio.Divide(h_pp_data_no_error) + hJetscapeRatio.SetMarkerSize(0) + hJetscapeRatio.SetFillColorAlpha(color_jetscape, 0.7) + hJetscapeRatio.SetFillStyle(1001) + hJetscapeRatio.SetLineStyle(1) + hJetscapeRatio.SetLineColor(color_jetscape) + hJetscapeRatio.SetLineWidth(4) + hJetscapeRatio.Draw('E3 same') + + if plot_zhang_pp: + hZhangRatio = hZhang_pp.Clone(hZhang_pp.GetName()+"_ratio") + hZhangRatio.Divide(h_pp_data_no_error) + hZhangRatio.SetMarkerSize(0) + #hZhangRatio.SetFillColorAlpha(color_zhang, 0.7) + #hZhangRatio.SetFillStyle(1001) + hZhangRatio.SetLineStyle(3) + hZhangRatio.SetLineColor(color_zhang) + hZhangRatio.SetLineWidth(4) + hZhangRatio.Draw('L 3 same') + + # Draw data stat + systematic uncertainties around ratio = 1 + hSysRatio = h_pp_sys.Clone(h_pp_sys.GetName()+"_ratio") + for i in range(0, hSysRatio.GetNbinsX()+1): + old_content = hSysRatio.GetBinContent(i) + if old_content != 0: + combined_error = math.sqrt( + hSysRatio.GetBinError(i) * hSysRatio.GetBinError(i) + \ + h_pp_data.GetBinError(i) * h_pp_data.GetBinError(i) ) + hSysRatio.SetBinError(i, combined_error/old_content) + hSysRatio.SetBinContent(i, 1) + hSysRatio.SetLineColor(0) + hSysRatio.SetFillColor(color) + hSysRatio.SetFillColorAlpha(color, 0.1) + hSysRatio.SetFillStyle(1001) + hSysRatio.SetLineWidth(0) + hSysRatio.Draw("E2 same") + + # Pb-Pb/pp data ratios + elif plot_pp_data: + h_sys_ppratio = h_sys.Clone(h_sys.GetName()+"_ppratio") + h_sys_ppratio.Divide(h_pp_sys) + h_sys_ppratio.Draw('E2 same') + + h_data_ppratio = h.Clone(h.GetName()+"_ppratio") + h_data_ppratio.Divide(h_pp_data) + h_data_ppratio.Draw('PE X0 same') + + # Theory ratios + if plot_jewel_no_recoils: + hJewelNoRecoilsRatio = hJewel_no_recoils.Clone(hJewel_no_recoils.GetName()+"_ppratio") + hJewelNoRecoilsRatio.Divide(hJewel_pp) + hJewelNoRecoilsRatio.SetMarkerSize(0) + 
hJewelNoRecoilsRatio.SetFillColorAlpha(color_jewel_no_recoils, 1) + hJewelNoRecoilsRatio.SetFillStyle(1001) + hJewelNoRecoilsRatio.SetLineStyle(1) + hJewelNoRecoilsRatio.SetLineColor(color_jewel_no_recoils) + hJewelNoRecoilsRatio.SetLineWidth(0) + hJewelNoRecoilsRatio.Draw('E3 same') + + if plot_jewel_recoils: + hJewelRecoilsRatio = hJewel_recoils.Clone(hJewel_recoils.GetName()+"_ppratio") + hJewelRecoilsRatio.Divide(hJewel_pp) + hJewelRecoilsRatio.SetMarkerSize(0) + hJewelRecoilsRatio.SetFillColorAlpha(color_jewel_recoils, 0.6) + hJewelRecoilsRatio.SetFillStyle(1001) + hJewelRecoilsRatio.SetLineStyle(1) + hJewelRecoilsRatio.SetLineColor(color_jewel_recoils) + hJewelRecoilsRatio.SetLineWidth(0) + hJewelRecoilsRatio.Draw('E3 same') + + if plot_jetscape: + hJetscapeRatio = hJetscape.Clone(hJetscape.GetName()+"_ppratio") + hJetscapeRatio.Divide(hJetscape_pp) + hJetscapeRatio.SetMarkerSize(0) + hJetscapeRatio.SetFillColorAlpha(color_jetscape, 0.5) + hJetscapeRatio.SetFillStyle(1001) + hJetscapeRatio.SetLineStyle(1) + hJetscapeRatio.SetLineColor(color_jetscape) + hJetscapeRatio.SetLineWidth(0) + hJetscapeRatio.Draw('E3 same') + + if plot_zhang: + hZhangRatio = hZhang.Clone(hZhang.GetName()+"_ppratio") + hZhangRatio.Divide(hZhang_pp) + hZhangRatio.SetMarkerSize(0) + #hZhangRatio.SetFillColorAlpha(color_zhang, 0.7) + #hZhangRatio.SetFillStyle(1001) + hZhangRatio.SetLineStyle(3) + hZhangRatio.SetLineColor(color_zhang) + hZhangRatio.SetLineWidth(4) + hZhangRatio.Draw('L 3 same') + + if plot_hybrid: + hHybridNoElasticRatio = hHybridNoElastic.Clone(hHybridNoElastic.GetName()+"_ppratio") + hHybridNoElasticRatio.Divide(hHybridNoElastic_pp) + hHybridNoElasticRatio.SetMarkerSize(0) + hHybridNoElasticRatio.SetFillColorAlpha(color_hybrid_no_elastic, 0.3) + hHybridNoElasticRatio.SetFillStyle(1001) + hHybridNoElasticRatio.SetLineStyle(1) + hHybridNoElasticRatio.SetLineColor(color_hybrid_no_elastic) + hHybridNoElasticRatio.SetLineWidth(0) + hHybridNoElasticRatio.Draw('E3 same') + + 
hHybridWithElasticRatio = hHybridWithElastic.Clone(hHybridWithElastic.GetName()+"_ppratio") + hHybridWithElasticRatio.Divide(hHybridWithElastic_pp) + hHybridWithElasticRatio.SetMarkerSize(0) + hHybridWithElasticRatio.SetFillColorAlpha(color_hybrid_elastic, 0.4) + hHybridWithElasticRatio.SetFillStyle(1001) + hHybridWithElasticRatio.SetLineStyle(1) + hHybridWithElasticRatio.SetLineColor(color_hybrid_elastic) + hHybridWithElasticRatio.SetLineWidth(0) + hHybridWithElasticRatio.Draw('E3 same') + + # Calculate difference in last bin + if (self.observable == "ang" and obs_label == "2") or (self.observable == "mass") and "SD" not in obs_label: + last_bin = h_data_ppratio.GetNbinsX() + print("*** Last bin, data: %.3f" % h_sys_ppratio.GetBinContent(last_bin), \ + "+/- %.3f" % h_data_ppratio.GetBinError(last_bin), "(stat)", + "+/- %.3f" % h_sys_ppratio.GetBinError(last_bin), "(sys)") + + print("*** Last bin, Hybrid w/ el.: %.3f" % hHybridWithElasticRatio.GetBinContent(last_bin), \ + "+/- %.3f" % hHybridWithElasticRatio.GetBinError(last_bin)) + diff = abs(h_sys_ppratio.GetBinContent(last_bin) - hHybridWithElasticRatio.GetBinContent(last_bin)) + diff_uncert = math.sqrt(h_data_ppratio.GetBinError(last_bin) ** 2 + \ + h_sys_ppratio.GetBinError(last_bin) ** 2 + \ + hHybridWithElasticRatio.GetBinError(last_bin) ** 2) + sigma = diff / diff_uncert + print("*** Difference: %.3f" % diff, "+/- %.3f" % diff_uncert, "(%.3f sigma)" % sigma) + + print("*** Last bin, Hybrid no-el.: %.3f" % hHybridNoElasticRatio.GetBinContent(last_bin), \ + "+/- %.3f" % hHybridNoElasticRatio.GetBinError(last_bin)) + diff = abs(h_sys_ppratio.GetBinContent(last_bin) - hHybridNoElasticRatio.GetBinContent(last_bin)) + diff_uncert = math.sqrt(h_data_ppratio.GetBinError(last_bin) ** 2 + \ + h_sys_ppratio.GetBinError(last_bin) ** 2 + \ + hHybridNoElasticRatio.GetBinError(last_bin) ** 2) + sigma = diff / diff_uncert + print("*** Difference: %.3f" % diff, "+/- %.3f" % diff_uncert, "(%.3f sigma)" % sigma) + + # 
Calculate MC ratio plots + elif plot_MC: + h_AA_no_error = h.Clone(h.GetName() + "_no_error") + for i in range(1, h.GetNbinsX() + 1): + h_AA_no_error.SetBinError(i, 0) + + if plot_jewel_no_recoils: + hJewelNoRecoilsRatio = hJewel_no_recoils.Clone(hJewel_no_recoils.GetName()+"_ratio") + hJewelNoRecoilsRatio.Divide(h_AA_no_error) + hJewelNoRecoilsRatio.SetMarkerSize(0) + hJewelNoRecoilsRatio.SetFillColorAlpha(color_jewel_no_recoils, 1) + hJewelNoRecoilsRatio.SetFillStyle(1001) + hJewelNoRecoilsRatio.SetLineStyle(1) + hJewelNoRecoilsRatio.SetLineColor(color_jewel_no_recoils) + hJewelNoRecoilsRatio.SetLineWidth(4) + hJewelNoRecoilsRatio.Draw('E3 same') + + if plot_jewel_recoils: + hJewelRecoilsRatio = hJewel_recoils.Clone(hJewel_recoils.GetName()+"_ratio") + hJewelRecoilsRatio.Divide(h_AA_no_error) + hJewelRecoilsRatio.SetMarkerSize(0) + hJewelRecoilsRatio.SetFillColorAlpha(color_jewel_recoils, 0.5) + hJewelRecoilsRatio.SetFillStyle(1001) + hJewelRecoilsRatio.SetLineStyle(1) + hJewelRecoilsRatio.SetLineColor(color_jewel_recoils) + hJewelRecoilsRatio.SetLineWidth(4) + hJewelRecoilsRatio.Draw('E3 same') + + if plot_jetscape: + hJetscapeRatio = hJetscape.Clone(hJetscape.GetName()+"_ratio") + hJetscapeRatio.Divide(h_AA_no_error) + hJetscapeRatio.SetMarkerSize(0) + hJetscapeRatio.SetFillColorAlpha(color_jetscape, 0.7) + hJetscapeRatio.SetFillStyle(1001) + hJetscapeRatio.SetLineStyle(1) + hJetscapeRatio.SetLineColor(color_jetscape) + hJetscapeRatio.SetLineWidth(4) + hJetscapeRatio.Draw('E3 same') + + if plot_zhang: + hZhangRatio = hZhang.Clone(hZhang.GetName()+"_ratio") + hZhangRatio.Divide(h_AA_no_error) + hZhangRatio.SetMarkerSize(0) + #hZhangRatio.SetFillColorAlpha(color_zhang, 0.7) + #hZhangRatio.SetFillStyle(1001) + hZhangRatio.SetLineStyle(3) + hZhangRatio.SetLineColor(color_zhang) + hZhangRatio.SetLineWidth(4) + hZhangRatio.Draw('L 3 same') + + if plot_hybrid: + hHybridNoElasticRatio = hHybridNoElastic.Clone(hHybridNoElastic.GetName()+"_ratio") + 
hHybridNoElasticRatio.Divide(h_AA_no_error) + hHybridNoElasticRatio.SetMarkerSize(0) + hHybridNoElasticRatio.SetFillColorAlpha(color_hybrid_no_elastic, 0.5) + hHybridNoElasticRatio.SetFillStyle(1001) + hHybridNoElasticRatio.SetLineStyle(1) + hHybridNoElasticRatio.SetLineColor(color_hybrid_no_elastic) + hHybridNoElasticRatio.SetLineWidth(4) + hHybridNoElasticRatio.Draw('E3 same') + + hHybridWithElasticRatio = hHybridWithElastic.Clone(hHybridWithElastic.GetName()+"_ratio") + hHybridWithElasticRatio.Divide(h_AA_no_error) + hHybridWithElasticRatio.SetMarkerSize(0) + hHybridWithElasticRatio.SetFillColorAlpha(color_hybrid_elastic, 0.5) + hHybridWithElasticRatio.SetFillStyle(1001) + hHybridWithElasticRatio.SetLineStyle(1) + hHybridWithElasticRatio.SetLineColor(color_hybrid_elastic) + hHybridWithElasticRatio.SetLineWidth(4) + hHybridWithElasticRatio.Draw('E3 same') + + # Draw data stat + systematic uncertainties around ratio = 1 + hSysRatio = h_sys.Clone(h_sys.GetName()+"_ratio") + for i in range(0, hSysRatio.GetNbinsX()+1): + old_content = hSysRatio.GetBinContent(i) + if old_content != 0: + combined_error = math.sqrt( + h_sys.GetBinError(i) * h_sys.GetBinError(i) + \ + h.GetBinError(i) * h.GetBinError(i) ) + hSysRatio.SetBinError(i, combined_error/old_content) + hSysRatio.SetBinContent(i, 1) + hSysRatio.SetLineColor(0) + hSysRatio.SetFillColor(color) + hSysRatio.SetFillColorAlpha(color, 0.1) + hSysRatio.SetFillStyle(1001) + hSysRatio.SetLineWidth(0) + hSysRatio.Draw("E2 same") + + if ignore_MC_top_panel: + maxy = 0.97 + miny = maxy - 0.06 * n_AA_models if plot_PbPb else maxy - 0.06 * n_pp_models + maxx = 0.98 + minx = 0.4 + if plot_pp_data and plot_PbPb and plot_MC and ((self.observable == "ang" \ + and "3" in obs_label and "SD" not in obs_label and min_pt_truth == 80) or \ + (self.observable == "mass" and "SD" not in obs_label and min_pt_truth == 40)): + maxx -= 0.22; minx -= 0.22 + myLegend2 = ROOT.TLegend(minx, miny, maxx, maxy) + self.utils.setup_legend(myLegend2, 
0.041) + if plot_jewel_no_recoils: + myLegend2.AddEntry(hJewelNoRecoilsRatio, 'JEWEL (recoils off)', 'f') + if plot_jewel_recoils: + myLegend2.AddEntry(hJewelRecoilsRatio, 'JEWEL (recoils on)', 'f') + if plot_jetscape: + myLegend2.AddEntry(hJetscapeRatio, 'JETSCAPE (MATTER+LBT)', 'f') + if plot_zhang: + myLegend2.AddEntry(hZhangRatio, 'Higher-Twist parton #it{E}-loss', 'l') + if plot_hybrid: + myLegend2.AddEntry(hHybridNoElasticRatio, 'Hybrid model (no elastic)', 'f') + myLegend2.AddEntry(hHybridWithElasticRatio, 'Hybrid model (with elastic)', 'f') + myLegend2.Draw() + else: # if not ignore_MC_top_panel: + maxy = 0.97 + miny = maxy - 0.06 * n_AA_models if plot_PbPb else maxy - 0.045 * n_pp_models + maxx = 0.98 + minx = 0.39 if plot_PbPb else 0.48 + if self.observable == "mass" or plot_pp_data and not plot_PbPb and plot_MC and \ + self.observable == "ang" and "SD" not in obs_label and min_pt_truth in [60, 80]: + maxx -= 0.2 if plot_PbPb else 0.28 + minx -= 0.2 if plot_PbPb else 0.28 + myLegend2 = ROOT.TLegend(minx, miny, maxx, maxy) + self.utils.setup_legend(myLegend2, 0.042) + if plot_pythia: + myLegend2.AddEntry(hPythia_draw, 'PYTHIA8 Monash2013', 'l') + if plot_herwig: + myLegend2.AddEntry(hHerwig_draw, 'Herwig7 default tune', 'l') + if plot_jewel_pp: + myLegend2.AddEntry(hJewel_pp_draw, 'JEWEL pp', 'l') + if plot_zhang_pp: + myLegend2.AddEntry(hZhang_pp, 'POWHEG+PYTHIA6', 'l') + if plot_hybrid_pp: + myLegend2.AddEntry(hHybridNoElastic_pp, 'Hybrid model vacuum', 'f') + #myLegend2.AddEntry(hHybridWithElastic_pp, 'Hybrid model (with elastic) baseline') + if plot_jetscape_pp: + myLegend2.AddEntry(hJetscape_pp, 'JETSCAPE pp', 'f') + if plot_PbPb: + if plot_jewel_no_recoils: + myLegend2.AddEntry(hJewel_no_recoils_draw, 'JEWEL (recoils off)', 'l') + if plot_jewel_recoils: + myLegend2.AddEntry(hJewel_recoils_draw, 'JEWEL (recoils on)', 'l') + if plot_jetscape: + myLegend2.AddEntry(hJetscape, 'JETSCAPE (MATTER+LBT)', 'f') + if plot_zhang: + myLegend2.AddEntry(hZhang, 
'Higher-Twist parton #it{E}-loss', 'l') + if plot_hybrid: + myLegend2.AddEntry(hHybridNoElastic, 'Hybrid model (no elastic)', 'f') + myLegend2.AddEntry(hHybridWithElastic, 'Hybrid model (with elastic)', 'f') + myLegend2.Draw() + + ########################################################################## + # Save plot to output + + name = 'hUnfolded_R{}_{}_{}-{}{}'.format(self.utils.remove_periods(jetR), obs_label, + int(min_pt_truth), int(max_pt_truth), self.file_format) + + if not plot_PbPb: + name = 'hUnfolded_R{}_{}_{}-{}_pp{}'.format( + self.utils.remove_periods(jetR), obs_label, int(min_pt_truth), + int(max_pt_truth), self.file_format) + + elif plot_pp_data: + name = 'hUnfolded_R{}_{}_{}-{}_ppcomp{}'.format( + self.utils.remove_periods(jetR), obs_label, int(min_pt_truth), + int(max_pt_truth), self.file_format) + + elif plot_MC: + name = 'hUnfolded_R{}_{}_{}-{}_MC{}'.format( + self.utils.remove_periods(jetR), obs_label, int(min_pt_truth), + int(max_pt_truth), self.file_format) + + name.replace("__", "_") + output_dir = getattr(self, 'output_dir_final_results') + output_dir_single = output_dir + '/single_results' + if not os.path.exists(output_dir_single): + os.mkdir(output_dir_single) + outputFilename = os.path.join(output_dir_single, name) + c.SaveAs(outputFilename) + c.Close() + + # Write result to ROOT file + final_result_root_filename = os.path.join(output_dir, 'fFinalResults.root') + fFinalResults = ROOT.TFile(final_result_root_filename, 'UPDATE') + h.Write() + h_sys.Write() + if plot_pythia: + hPythia.Write() + if plot_herwig: + hHerwig.Write() + if plot_jewel_no_recoils: + hJewel_no_recoils.Write() + if plot_jewel_recoils: + hJewel_recoils.Write() + if plot_jetscape: + hJetscape.Write() + if plot_zhang: + hZhang.Write() + fFinalResults.Close() + + + #---------------------------------------------------------------------- + # Get unfolded data from the previous measurement + def get_h_prev_result(self, jetR, obs_label, min_pt_truth, max_pt_truth): + + 
output_dir = getattr(self, 'output_dir_main') + + f = ROOT.TFile(self.results_pp, 'READ') + + # Retrieve previous result and ensure that it has the proper bin range + h_prev_data_name = ('hmain_%s_R%s_%s_%s-%s_trunc' % \ + (self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)).replace("__", "_") + h_prev_data = f.Get(h_prev_data_name) + if not h_prev_data: + raise AttributeError("%s not found in file %s" % (h_prev_data_name, self.results_pp)) + + # Rename and steal ownership from ROOT + name = 'h_prev_result_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_prev_data.SetNameTitle(name, name) + h_prev_data.SetDirectory(0) + + # Retrieve previous result systematics and ensure that it has the proper bin range + h_prev_sys_up_name = 'hResult_%s_sys_plus_R%s_%s_%s-%s' % \ + (self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_prev_sys_up = f.Get(h_prev_sys_up_name) + if not h_prev_sys_up: + raise AttributeError("%s not found in file %s" % (h_prev_sys_up_name, self.results_pp)) + name = 'h_prev_sys_up_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_prev_sys_up.SetNameTitle(name, name) + h_prev_sys_up.SetDirectory(0) + + h_prev_sys_down_name = 'hResult_%s_sys_minus_R%s_%s_%s-%s' % \ + (self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_prev_sys_down = f.Get(h_prev_sys_down_name) + if not h_prev_sys_down: + raise AttributeError("%s not found in file %s" % (h_prev_sys_down_name, self.results_pp)) + name = 'h_prev_sys_down_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_prev_sys_down.SetNameTitle(name, name) + h_prev_sys_down.SetDirectory(0) + + # Set normalization to 1 in all histograms + h_prev_data.Scale(jetR) + h_prev_sys_up.Scale(jetR) + h_prev_sys_down.Scale(jetR) + + return h_prev_data, h_prev_sys_up, h_prev_sys_down + + + #---------------------------------------------------------------------- + def 
MC_prediction(self, jetR, obs_setting, obs_label, min_pt_truth, + max_pt_truth, maxbin, MC='Pythia', overlay=False, recoils=False): + + scale_width = True + if 'pythia' in MC.lower(): + hMC = self.get_pythia_from_response( + jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay) + elif 'herwig' in MC.lower(): + hMC = self.get_herwig_from_response( + jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay) + elif "jewel" in MC.lower(): + hMC = self.get_jewel_from_response( + jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay, MC, recoils) + elif "jetscape" in MC.lower(): + hMC = self.get_jetscape_from_response( + jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay, MC) + elif "zhang" in MC.lower(): + hMC = self.get_zhang_from_file( + jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay, MC) + else: + raise NotImplementedError("Prediction type %s not recognized." % MC) + + if not hMC: + return [None, None] + + n_jets_inclusive = hMC.Integral(0, hMC.GetNbinsX(), "" if scale_width else "width") + n_jets_tagged = hMC.Integral(hMC.FindBin( + self.truth_bin_array(obs_label)[0]), hMC.GetNbinsX(), "" if scale_width else "width") + + fraction_tagged_MC = n_jets_tagged/n_jets_inclusive + hMC.Scale(1./n_jets_inclusive, "width" if scale_width else "") + + return [hMC, fraction_tagged_MC] + + #---------------------------------------------------------------------- + def get_pythia_from_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, + maxbin, overlay=False): + + # Use direct (unmatched) files instead of projecting fastsim RM + do_direct_files = (self.observable == "mass") #(len(self.theory_predictions) >= (int(recoils) + 1)) + + h = None + if do_direct_files: # Read from unmatched TH2 + + f = ROOT.TFile(self.main_response, 'READ') + name = "h_%s_JetPt_Truth_R%s_%sScaled" % (self.observable, str(jetR), obs_label) \ + if obs_label else "h_%s_JetPt_Truth_R%sScaled" % (self.observable, str(jetR)) + th2 = f.Get(name) + if not th2: + 
raise AttributeError("%s not found in %s" % (name, self.main_response)) + if not th2.GetSumw2(): + th2.Sumw2() + + # Set range and binning to be the same as data + name_data = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_data = getattr(self, name_data) + pt_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + obs_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + move_underflow = (obs_bin_array[0] < 0) + + th2.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) + h = th2.ProjectionY() + + # Finally, rename and truncate the histogram to the correct size + name = ('hPythia_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)).replace("__", "_") + #print("before:", [h.GetXaxis().GetBinLowEdge(i) for i in range(2, h.GetNbinsX()+2)]) + #print("changing to:", obs_bin_array) + h_rebin = h.Rebin(len(obs_bin_array)-1, name+"_Rebin", obs_bin_array) + #print("after:", [h_rebin.GetXaxis().GetBinLowEdge(i) for i in range(2, h_rebin.GetNbinsX()+2)]) + #print("\n\n\n\n") + if move_underflow: + h_rebin.SetBinContent(1, h.GetBinContent(0)) + h_rebin.SetBinError(1, h.GetBinError(0)) + h = self.truncate_hist(h_rebin, None, maxbin, name) + h.SetDirectory(0) + + else: # Get projection of RM + output_dir = getattr(self, 'output_dir_main') + + filepath = os.path.join(output_dir, 'response.root') + f = ROOT.TFile(filepath, 'READ') + + thn_name = ('hResponse_JetPt_{}_R{}_{}_rebinned'.format( + self.observable, jetR, obs_label)).replace("__", "_") + thn = f.Get(thn_name) + if not thn: + raise AttributeError("%s not found in %s" % (thn_name, filepath)) + thn.GetAxis(1).SetRangeUser(min_pt_truth, max_pt_truth) + + name = ('hPythia_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)).replace("__", "_") + h = self.truncate_hist(thn.Projection(3), None, maxbin, 
name) + h.SetDirectory(0) + + return h + + #---------------------------------------------------------------------- + def get_herwig_from_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, + maxbin, overlay=False): + + # Use direct (unmatched) files instead of projecting fastsim RM + do_direct_files = (self.observable == "mass") #(len(self.theory_predictions) >= (int(recoils) + 1)) + + h = None + + if do_direct_files: # Read from TH2 + + f = ROOT.TFile(self.fastsim_response_list[1], 'READ') + name = "h_%s_JetPt_Truth_R%s_%sScaled" % (self.observable, str(jetR), obs_label) \ + if obs_label else "h_%s_JetPt_Truth_R%sScaled" % (self.observable, str(jetR)) + th2 = f.Get(name) + if not th2: + raise AttributeError("%s not found in %s" % (name, self.main_response)) + if not th2.GetSumw2(): + th2.Sumw2() + + # Set range and binning to be the same as data + name_data = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_data = getattr(self, name_data) + pt_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + obs_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + move_underflow = (obs_bin_array[0] < 0) + + th2.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) + h = th2.ProjectionY() + + # Finally, rename and truncate the histogram to the correct size + name = ('hHerwig_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)).replace("__", "_") + h_rebin = h.Rebin(len(obs_bin_array)-1, name+"_Rebin", obs_bin_array) + if move_underflow: + h_rebin.SetBinContent(1, h.GetBinContent(0)) + h_rebin.SetBinError(1, h.GetBinError(0)) + h = self.truncate_hist(h_rebin, None, maxbin, name) + h.SetDirectory(0) + + else: # Get projection of RM + try: + filepath = os.path.join(self.output_dir_fastsim_generator1, 'response.root') + except AttributeError: # No fastsim generator + return None + f 
= ROOT.TFile(filepath, 'READ') + + thn_name = 'hResponse_JetPt_{}_R{}_{}_rebinned'.format( + self.observable, jetR, obs_label).replace("__", "_") + thn = f.Get(thn_name) + if not thn: + raise AttributeError("%s not found in %s" % (thn_name, filepath)) + thn.GetAxis(1).SetRangeUser(min_pt_truth, max_pt_truth) + + name = ('hHerwig_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)).replace("__", "_") + h = self.truncate_hist(thn.Projection(3), None, maxbin, name) + h.SetDirectory(0) + + return h + + #---------------------------------------------------------------------- + def get_jewel_from_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, + maxbin, overlay=False, MC="jewel", recoils=False): + + # Use direct (unmatched) files instead of projecting fastsim RM + do_direct_files = True #(len(self.theory_predictions) >= (int(recoils) + 1)) + # Copy uncertainties them from the fast sim + copy_errors = False + + # First, try to get from direct calculation files (no fast sim) + if do_direct_files: + # Find the theory file corresponding to this prediction + type = "recoils on" if recoils else "recoils off" + if "pp" in MC.lower(): + type = "pp" + + filepath = None + for i, name in enumerate(self.theory_predictions_names): + if "jewel" in name.lower() and type in name.lower(): + filepath = self.theory_predictions[i] + break + if not filepath: + return None + + f = ROOT.TFile(filepath, 'READ') + name = "h_%s_JetPt_R%s_%sScaled" % (self.observable, str(jetR), obs_label) if \ + obs_label else "h_%s_JetPt_R%sScaled" % (self.observable, str(jetR)) + th2 = f.Get(name) + if not th2: + raise AttributeError("%s not found in %s" % (name, filepath)) + if not th2.GetSumw2(): + th2.Sumw2() + + # Set range and binning to be the same as data + name_data = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_data = getattr(self, name_data) + pt_bin_array = array('d', 
[h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + obs_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + move_underflow = (obs_bin_array[0] < 0) + #th2 = self.histutils.rebin_th2( + # th2, th2.GetName()+"_rebin", pt_bin_array, len(pt_bin_array)-1, + # obs_bin_array, len(obs_bin_array)-1, move_underflow) + + th2.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) + h = th2.ProjectionY() + #for i in range(1, h.GetNbinsX()+1): + # h.SetBinError(i, 1e-10) + #copy_errors = True + + # Finally, rename and truncate the histogram to the correct size + name = 'hJewel_{}_{}_R{}_{}_{}-{}'.format( + type.replace(' ', '_'), self.observable, jetR, obs_label, + min_pt_truth, max_pt_truth) + h_rebin = h.Rebin(len(obs_bin_array)-1, name+"_Rebin", obs_bin_array) + if move_underflow: + h_rebin.SetBinContent(1, h.GetBinContent(0)) + h_rebin.SetBinError(1, h.GetBinError(0)) + h = self.truncate_hist(h_rebin, None, maxbin, name) + #print([h.GetBinLowEdge(i) for i in range(1, h.GetNbinsX()+2)]) + h.SetDirectory(0) + + if not do_direct_files or copy_errors: + # Second, try to get from the fast simulation RM + gen_to_use = 2 + int(recoils) + try: + filepath = os.path.join( + self.output_dir, self.observable, "fastsim_generator%i" % gen_to_use, 'response.root') + except AttributeError: + # Not doing JEWEL for this case + return None + f = ROOT.TFile(filepath, 'READ') + + thn_name = 'hResponse_JetPt_{}_R{}_{}_rebinned'.format( + self.observable, jetR, obs_label).replace("__", "_") + thn = f.Get(thn_name) + if not thn: + raise AttributeError("%s not found in %s" % (thn_name, filepath)) + thn.GetAxis(1).SetRangeUser(min_pt_truth, max_pt_truth) + + name = 'hJewel_recoils_{}_{}_R{}_{}_{}-{}'.format( + "on" if recoils else "off", self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_proj = self.truncate_hist(thn.Projection(3), None, maxbin, name) + if copy_errors: + for i in range(1, 
h.GetNbinsX()+1): + h.SetBinError(i, h_proj.GetBinError(i)) + else: + h = h_proj + h.SetDirectory(0) + + return h + + #---------------------------------------------------------------------- + def get_jetscape_from_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, + maxbin, overlay=False, MC="jetscape"): + + # Determine type from MC string arg + type = "AA" # Default to AA prediction + if "pp" in MC.lower(): + type = "pp" + elif "ratio" in MC.lower(): + type = "ratio" + + if self.observable == "mass": + # Yasuki's fixed predictions do not include the ratio; we have to make it + if type == "ratio": + return None + + i_pred = -1 + for i in range(0, len(self.theory_predictions_names)): + if "jetscape" in self.theory_predictions_names[i].lower(): #and type in self.theory_predictions_names[i].lower(): + i_pred = i + break + + if i_pred < 0: + return None + filepath = self.theory_predictions[i_pred] + f = ROOT.TFile(filepath, 'READ') + + #if self.observable != "ang": + # return None + gr = "" if "SD" in obs_label else "un" + if self.observable == "ang": + alpha = obs_label.split('_')[0] + name = "h_chjet_angularity_%sgroomed_alice_R%s_pt%i-%i_alpha%s_5020_0-10_%s" % \ + (gr, jetR, min_pt_truth, max_pt_truth, alpha, type) + elif self.observable == "mass": + name = "h_chjet_mass_%sgroomed_alice_R%s_pt%i-%i_5020_0-10_%s" % \ + (gr, jetR, min_pt_truth, max_pt_truth, type) + + h = f.Get(name) + if not h: + raise AttributeError("%s not found in %s" % (name, filepath)) + + name = 'hJetscape_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + width = [] + if "SD" in obs_label: + # Add negative bin to the groomed plots + bins = array('d', [-0.1] + [h.GetBinLowEdge(i) for i in range(1, h.GetNbinsX()+2)]) + h_new = ROOT.TH1F(name+"_new", name+"_new", len(bins)-1, bins) + integral = h.Integral(1, h.GetNbinsX()) + #h_new.SetBinContent(1, 0) + #h_new.SetBinError(1, 0) + max_to_use = h.GetNbinsX()+2 if self.observable == "ang" else 
h.GetNbinsX()+1 + for i in range(0, max_to_use): + width = 1 if i == 0 else abs(h.GetBinLowEdge(i + 1) - h.GetBinLowEdge(i)) + h_new.SetBinContent(i + 1, width * h.GetBinContent(i)) + h_new.SetBinError(i + 1, width * h.GetBinError(i)) + h = self.truncate_hist(h_new, None, maxbin, name) + h.Scale(1./integral) + else: + for i in range(1, h.GetNbinsX()+1): + width = abs(h.GetBinLowEdge(i + 1) - h.GetBinLowEdge(i)) + h.SetBinContent(i, width * h.GetBinContent(i)) + h.SetBinError(i, width * h.GetBinError(i)) + h = self.truncate_hist(h, None, maxbin, name) + h.Scale(1./h.Integral()) + h.SetDirectory(0) + + return h + + #---------------------------------------------------------------------- + def get_zhang_from_file( + self, jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay, MC): + + # At the moment only have R=0.2 predictions + if str(jetR) != "0.2": + return None + + # Determine and open the correct file + base_dir = "/rstorage/alice/AnalysisResults/ang/PbPb/zhang_predictions" + gr = "groomed" if "sd" in obs_label.lower() else "ungroomed" + type = "pp" if "pp" in MC.lower() else "PbPb" + + # Get observable-specific details + filename = "" + pt_range = "" + column = 0 + + if self.observable == "ang": + + filename = type + '-' + gr + "-jets.dat" + # Fixed predictions for groomed jets + if gr[0] == 'g': + filename = "final-" + filename + + alpha = obs_label.split('_')[0] + pt_range = "(%i-%i)GeV" % (min_pt_truth, max_pt_truth) + column = ["1", "1.5", "2", "3"].index(alpha) + + elif self.observable == "mass": + + # Alternate filename for mass calcluations + filename = gr + "_jet_mass.dat" + + pt_range = "//%i-%i//" % (min_pt_truth, max_pt_truth) + column = ["pp", "PbPb", "PbPb/pp"].index(type) + + else: + raise ValueError( + "Did not parse %s correctly: no predictions implemented" % self.observable) + + contents = [] + with open(os.path.join(base_dir, filename), 'r') as f: + contents = f.readlines() + + # Find the correct set of data -- we have 4 pT bins in each 
file + values = self.trim_contents(contents, min_pt_truth, max_pt_truth, pt_range, column) + + # Get the observable binning from the measured data + name_data = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_data = self.truncate_hist(getattr(self, name_data), None, maxbin, name_data+"_trunc") + obs_bin_array = [h_data.GetXaxis().GetBinLowEdge(i) for i in range(1, h_data.GetNbinsX()+2)] + if "SD" in obs_label: + values = [0] + values + obs_bin_array = array('d', obs_bin_array) + + # Create and fill TH1 with values from file + name = ("hZhang_%s_R%s_%s_%i-%i" % (type, jetR, obs_label, min_pt_truth, max_pt_truth)).replace("__", "_") + h = ROOT.TH1F(name, name, len(obs_bin_array)-1, obs_bin_array) + for i in range(1, h.GetNbinsX()+1): + width = abs(h.GetBinLowEdge(i + 1) - h.GetBinLowEdge(i)) + h.SetBinContent(i, width * values[i-1]) + h.SetBinError(i, 1e-8) + h.Scale(1./h.Integral()) + del h_data + + return h + + #---------------------------------------------------------------------- + def trim_contents(self, contents, min_pt_truth, max_pt_truth, range, column): + + recording = False + data = [] + for line in contents: + # Determine line where the data of interest starts + if not recording: + if range in line: + recording = True + continue + + l_split = line.split() + if not len(l_split) or l_split[0][0] in ['p', 'a', 'n']: + continue + + # Stop condition + if line[0] in ['-', '/']: + if len(data): + break + continue + + val = [float(i) if i[0].isnumeric() else None for i in l_split][column] + if val != None: + data.append(val) + + return data + + #---------------------------------------------------------------------- + def get_hybrid(self, jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin): + + # For now only have R=0.2 predictions + if str(jetR) != "0.2" or self.observable not in ["ang", "mass"]: + return None, None, None, None + + # Get the observable binning from the measured data + name_data = 
'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) #.replace("__","_") + h_data = self.truncate_hist(getattr(self, name_data), None, maxbin, name_data+"_trunc") + obs_bin_array = [h_data.GetXaxis().GetBinLowEdge(i) for i in range(1, h_data.GetNbinsX()+2)] + obs_bin_array = array('d', obs_bin_array) + del h_data + + filepath = "/rstorage/alice/AnalysisResults/ang/PbPb/hybrid_%s/" % self.observable + + pT_bin = [40, 60, 80, 100].index(min_pt_truth) + if self.observable == "ang": + alpha = ["1", "1.5", "2", "3"].index(obs_label.split('_')[0]) + h_pp_NoElastic = None; h_pp_WithElastic = None; h_PbPb_NoElastic = None; h_PbPb_WithElastic = None + for elastic in ["No", "With"]: # Elastic Moliere scattering + filename = "" + if self.observable == "ang": + filename = "010_Angus_%sElastic_WantWake_1_JetR_2_Angus_JetBin_%i_Alpha_%i_Groomed_%i.dat" % \ + (elastic, pT_bin, alpha, int("SD" in obs_label)) + elif self.observable == "mass": + WantWake = True + el = elastic if elastic == "No" else "" + filename = "results/HYBRID_Hadrons_%sElastic_5020_010_Wake_%i_ChJetMass_JetR0p2_Gro_%i_JetBin_%i.dat" % \ + (el, int(WantWake), int("SD" in obs_label), pT_bin+1) + + # Create and fill TH1 with values from file + name = "hHybrid_%s_%sElastic_pp_R%s_%s_%i-%i" % \ + (self.observable, elastic, jetR, obs_label, min_pt_truth, max_pt_truth) + h_pp = ROOT.TH1F(name, name, len(obs_bin_array)-1, obs_bin_array) + name = "hHybrid_%s_%sElastic_PbPb_R%s_%s_%i-%i" % \ + (self.observable, elastic, jetR, obs_label, min_pt_truth, max_pt_truth) + h_PbPb = ROOT.TH1F(name, name, len(obs_bin_array)-1, obs_bin_array) + + # Read lines from file and fill histograms + lines = None + with open(os.path.join(filepath, filename), 'r') as f: + delin = ' ' if self.observable == "ang" else '\t' + lines = [line.split(delin) for line in f.readlines()] + + if self.observable == "ang": + obs_bin_center = [float(line[0]) for line in lines] + upper_band_PbPb = [float(line[1]) 
for line in lines] + lower_band_PbPb = [float(line[2]) for line in lines] + vacuum = [float(line[3]) for line in lines] + error_vacuum = [float(line[4]) for line in lines] + upper_band_ratio = [float(line[5]) for line in lines] + lower_band_ratio = [float(line[6]) for line in lines] + + elif self.observable == "mass": + obs_bin_center = [float(line[0]) for line in lines] + vacuum = [float(line[1]) for line in lines] + error_vacuum = [float(line[2]) for line in lines] + upper_band_PbPb = [float(line[3]) for line in lines] + lower_band_PbPb = [float(line[4]) for line in lines] + upper_band_ratio = [float(line[5]) for line in lines] + lower_band_ratio = [float(line[6]) for line in lines] + + if "SD" in obs_label: + obs_bin_center = [-0.05] + obs_bin_center + upper_band_PbPb = [0] + upper_band_PbPb + lower_band_PbPb = [0] + lower_band_PbPb + vacuum = [0] + vacuum + error_vacuum = [0] + error_vacuum + upper_band_ratio = [0] + upper_band_ratio + lower_band_ratio = [0] + lower_band_ratio + + # Check that obs_bin_center is the same as obs_bin_array + expected_obs_bin_center = [(obs_bin_array[i+1] - obs_bin_array[i]) / 2 + obs_bin_array[i] for \ + i in range(0, len(obs_bin_array)-1)] + try: + diff = [abs(expected_obs_bin_center[i] - obs_bin_center[i]) < 1e-8 for i in range(len(expected_obs_bin_center))] + except: + raise IndexError("Expected:", expected_obs_bin_center, "\nFound:", obs_bin_center) + if False in diff: + raise ValueError("Expected:", expected_obs_bin_center, "\nFound:", obs_bin_center) + + error_PbPb = [(upper_band_PbPb[i] - lower_band_PbPb[i]) / 2 for i in range(0, h_pp.GetNbinsX())] + center_PbPb = [lower_band_PbPb[i] + error_PbPb[i] for i in range(0, h_pp.GetNbinsX())] + + # Save to TH1 for pp + for i in range(0, h_pp.GetNbinsX()): + h_pp.SetBinContent(i+1, vacuum[i]) + h_pp.SetBinError(i+1, error_vacuum[i]) + for i in range(0, h_PbPb.GetNbinsX()): + h_PbPb.SetBinContent(i+1, center_PbPb[i]) + h_PbPb.SetBinError(i+1, error_PbPb[i]) + + if elastic == "No": + 
h_pp_NoElastic = h_pp + h_PbPb_NoElastic = h_PbPb + else: # elastic == "With" + h_pp_WithElastic = h_pp + h_PbPb_WithElastic = h_PbPb + + return [h_pp_NoElastic, h_pp_WithElastic, h_PbPb_NoElastic, h_PbPb_WithElastic] + + #---------------------------------------------------------------------- + def get_pp_data(self, jetR, obs_label, min_pt_truth, max_pt_truth, + xbins, overlay=False): + + output_dir = getattr(self, 'output_dir_main') + + f = ROOT.TFile(self.results_pp, 'READ') + + # Retrieve pp data and ensure that it has the proper bin range + h_pp_data_name = ('hmain_%s_R%s_%s_%s-%s_trunc' % \ + (self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)).replace('__', '_') + h_pp_data = f.Get(h_pp_data_name) + if not h_pp_data: + raise AttributeError("%s not found in file %s" % (h_pp_data_name, self.results_pp)) + + name = 'h_pp_data_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + if (xbins == [h_pp_data.GetBinLowEdge(i) for i in range(1, h_pp_data.GetNbinsX()+2)]): + h_pp_data.SetNameTitle(name, name) + else: + h_pp_data = h_pp_data.Rebin(len(xbins)-1, name, array('d', xbins)) + h_pp_data.SetDirectory(0) + + # Retrieve pp systematics and ensure that it has the proper bin range + for n_iter in range(2, 20): + h_pp_sys_name = 'hResult_%s_systotal_R%s_%s_n%i_%s-%s' % \ + (self.observable, jetR, obs_label, n_iter, min_pt_truth, max_pt_truth) + h_pp_sys = f.Get(h_pp_sys_name) + if h_pp_sys: + break + if not h_pp_sys: + raise AttributeError("%s not found in file %s" % (h_pp_sys_name, self.results_pp)) + + name = 'h_pp_sys_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + if (xbins == [h_pp_sys.GetBinLowEdge(i) for i in range(1, h_pp_sys.GetNbinsX()+2)]): + h_pp_sys.SetNameTitle(name, name) + else: + h_pp_sys = h_pp_sys.Rebin(len(xbins)-1, name, array('d', xbins)) + h_pp_sys.SetDirectory(0) + + return h_pp_data, h_pp_sys + + + 
#---------------------------------------------------------------------- + def plot_final_result_overlay(self, i_config, jetR, overlay_list): + print('Plotting overlay of', overlay_list) + + # Plot overlay of different subconfigs, for fixed pt bin + for i in range(0, len(self.pt_bins_reported) - 1): + min_pt_truth = self.pt_bins_reported[i] + max_pt_truth = self.pt_bins_reported[i+1] + maxbins = [self.obs_max_bins(obs_label)[i] for obs_label in self.obs_labels] + + # Plot PYTHIA comparison plots + self.plot_observable_overlay_subconfigs( + i_config, jetR, overlay_list, min_pt_truth, + max_pt_truth, maxbins, plot_MC=True, MC='PYTHIA', plot_ratio=True) + + if not self.is_pp and self.results_pp: + # Plot Pb-Pb/pp data comparison plots + self.plot_observable_overlay_subconfigs( + i_config, jetR, overlay_list, min_pt_truth, + max_pt_truth, maxbins, plot_pp_data=True, plot_ratio=True) + + else: + # Plot data-only plots + self.plot_observable_overlay_subconfigs( + i_config, jetR, overlay_list, min_pt_truth, + max_pt_truth, maxbins) + + + # Plot PYTHIA vs pp comparison + if min_pt_truth == 40 and not self.use_prev_result: + # Plot PYTHIA comparison plots + self.plot_observable_overlay_subconfigs( + i_config, jetR, overlay_list, min_pt_truth, max_pt_truth, maxbins, + plot_pp_data=True, plot_MC=True, MC='PYTHIA', plot_ratio=True) + + + #---------------------------------------------------------------------- + def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_pt_truth, + max_pt_truth, maxbins, plot_pp_data=False, plot_MC=False, + MC='PYTHIA', plot_nll=False, plot_ratio=False): + + # Flag to plot ratio all on the same scale, 0 to 2.2 + plot_ratio_same_scale = True + + # In the 2-ratio case, whether to plot just 1 value of alpha + single_alpha = (self.observable == "mass") or False + if single_alpha and plot_ratio and plot_pp_data and plot_MC: + overlay_list = [i for i in overlay_list if i == "config_R0.2_1"] + if not overlay_list: + return + + 
plot_pp_data = plot_pp_data if self.results_pp else False + + name = 'cResult_overlay_R{}_allpt_{}-{}'.format(jetR, min_pt_truth, max_pt_truth) + if plot_ratio: + c = ROOT.TCanvas(name, name, 600, 650 if not (plot_pp_data and plot_MC) else 900) + else: + c = ROOT.TCanvas(name, name, 600, 450) + c.Draw() + + c.cd() + minpad1y = 0.5 if (plot_pp_data and plot_MC) else 0.4 + if plot_ratio: + pad1 = ROOT.TPad('myPad', 'The pad', 0, minpad1y, 1, 1) + else: + pad1 = ROOT.TPad('myPad', 'The pad', 0, 0, 1, 1) + pad1.SetLeftMargin(0.2) + pad1.SetTopMargin(0.07) + pad1.SetRightMargin(0.04) + pad1.SetBottomMargin(0.13) + pad1.SetTicks(1, 1) + if plot_ratio: + pad1.SetBottomMargin(0.) + # set log y axis if all configs are SD + setlogy = False + for name in overlay_list: + if "SD" not in name: + break + if self.observable != "ang": + break + elif name == overlay_list[-1]: + setlogy = True + pad1.Draw() + pad1.cd() + + legend_xmin = 0.6 + legend_ymin = 0.52 + legend_ymax = legend_ymin + 0.31 + if plot_ratio: + if plot_pp_data and plot_MC: + if single_alpha: + legend_xmin = 0.52 #0.63 + legend_ymax = 0.91 + legend_ymin = 0.77 + else: + legend_xmin = 0.5 + legend_ymin = 0.25 + legend_ymax = legend_ymin + 0.31 + else: + legend_xmin = 0.61 + myLegend = ROOT.TLegend(legend_xmin, legend_ymin, 0.96, legend_ymax) + self.utils.setup_legend(myLegend, 0.045) + + legend2_ymin = legend_ymin + 0.17 + legend2_xmin = legend_xmin + 0.2 + legend2_ymax = legend_ymin + 0.31 + if plot_ratio: + if plot_pp_data: + if plot_MC: + if single_alpha: + # Use for MC & pp labels + legend2_xmin = legend_xmin #- 0.11 + legend2_ymin = legend_ymin - 0.18 + legend2_ymax = legend_ymin + else: + legend2_xmin = legend_xmin + 0.25 + legend2_ymin = legend_ymin + 0.21 + else: + legend2_xmin = legend_xmin + 0.2 + legend2_ymin = legend_ymin + 0.188 + else: + legend2_ymin = legend_ymin + 0.105 + + myLegend2 = ROOT.TLegend(legend2_xmin, legend2_ymin, 0.96, legend2_ymax) + self.utils.setup_legend(myLegend2, 0.045) + + name = 
'hmain_{}_R{}_{{}}_{}-{}'.format(self.observable, jetR, min_pt_truth, max_pt_truth) + ymax, ymin = self.get_max_min(name, overlay_list, maxbins) + + h_list = [] + text_list = [] + + for i, subconfig_name in enumerate(self.obs_subconfig_list): + + if subconfig_name not in overlay_list: + continue + + obs_setting = self.obs_settings[i] + grooming_setting = self.grooming_settings[i] + obs_label = self.obs_labels[i] + maxbin = maxbins[i] + + if subconfig_name == overlay_list[0]: + marker = 20 + marker_pythia = 24 + color = 1 + elif subconfig_name == overlay_list[1]: + marker = 21 + marker_pythia = 25 + color = 600-6 + elif subconfig_name == overlay_list[2]: + marker = 33 + marker_pythia = 27 + color = 632-4 + else: # subconfig_name == overlay_list[3]: + marker = 34 + marker_pythia = 28 + color = 416-2 + + name = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + if grooming_setting: + fraction_tagged = getattr(self, 'tagging_fraction_R{}_{}_{}-{}'.format( + jetR, obs_label, min_pt_truth, max_pt_truth)) + + if grooming_setting and maxbin: + h = self.truncate_hist(getattr(self, name), None, maxbin+1, (name+'_trunc').replace("__", "_")) + else: + h = self.truncate_hist(getattr(self, name), None, maxbin, (name+'_trunc').replace("__", "_")) + h_sys = getattr(self, 'hResult_{}_systotal_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)) + + h.SetDirectory(0) + h.SetMarkerSize(1.5) + h.SetMarkerStyle(marker) + h.SetMarkerColor(color) + h.SetLineStyle(1) + h.SetLineWidth(2) + h.SetLineColor(color) + + h_sys.SetLineColor(0) + h_sys.SetFillColor(color) + h_sys.SetFillColorAlpha(color, 0.3) + h_sys.SetFillStyle(1001) + h_sys.SetLineWidth(0) + + if subconfig_name == overlay_list[0]: + + pad1.cd() + xmin = self.obs_config_dict[subconfig_name]['obs_bins_truth'][0] + xmax = self.obs_config_dict[subconfig_name]['obs_bins_truth'][-1] + if maxbin: + xmax = 
self.obs_config_dict[subconfig_name]['obs_bins_truth'][maxbin] + myBlankHisto = ROOT.TH1F('myBlankHisto','Blank Histogram', 1, xmin, xmax) + myBlankHisto.SetNdivisions(505) + xtitle = self.xtitle + ytitle = self.ytitle + if single_alpha and plot_ratio and plot_pp_data and plot_MC: + xtitle = xtitle.replace("#it{#alpha}", "#it{#alpha}=1") + ytitle = ytitle.replace("#it{#alpha}", "#it{#alpha}=1") + myBlankHisto.SetXTitle(xtitle) + myBlankHisto.GetYaxis().SetTitleOffset(1.1 if plot_ratio else 1.4) + myBlankHisto.SetYTitle(ytitle) + if jetR == 0.2: + if min_pt_truth == 20: + myBlankHisto.SetMaximum(1.1*ymax) + elif min_pt_truth == 40: + if self.observable == "mass" and grooming_setting: + myBlankHisto.SetMaximum(1.9*ymax) + elif self.use_prev_result: + myBlankHisto.SetMaximum(1.8*ymax) + elif plot_pp_data and plot_MC and single_alpha: + myBlankHisto.SetMaximum(2*ymax) + else: + myBlankHisto.SetMaximum(1.7*ymax) + elif min_pt_truth == 60: + if self.observable == "mass": + myBlankHisto.SetMaximum(1.9*ymax) + else: + myBlankHisto.SetMaximum(1.15*ymax) + elif self.observable == "mass": + myBlankHisto.SetMaximum(1.9*ymax) + else: + myBlankHisto.SetMaximum(1.3*ymax) + elif jetR == 0.4: + if min_pt_truth == 20: + myBlankHisto.SetMaximum(1.5*ymax) + elif min_pt_truth == 40: + myBlankHisto.SetMaximum(1.35*ymax) + elif min_pt_truth == 60: + myBlankHisto.SetMaximum(1.15*ymax) + else: + myBlankHisto.SetMaximum(1.5*ymax) + else: + myBlankHisto.SetMaximum(1.5*ymax) + if setlogy: + if self.observable == "mass": + myBlankHisto.SetMaximum(1e2*ymax) + else: + myBlankHisto.SetMaximum(5*ymax) + myBlankHisto.SetMinimum(ymin/2) + elif plot_ratio: + myBlankHisto.SetMinimum(2e-4) # Don't draw 0 on top panel + else: + myBlankHisto.SetMinimum(0.) 
+ myBlankHisto.GetYaxis().SetTitleSize(0.06) + myBlankHisto.GetYaxis().SetTitleOffset(1.2) + myBlankHisto.GetYaxis().SetLabelSize(0.06) + myBlankHisto.Draw('E') + if setlogy: + pad1.SetLogy() + + # Initialize ratio plot + if plot_ratio: + + c.cd() + minpad2y = 0.3 if (plot_pp_data and plot_MC) else 0 + pad2 = ROOT.TPad("pad2", "pad2", 0, minpad2y, 1, minpad1y) + pad2.SetTopMargin(0) + pad2.SetBottomMargin(0.3 if not (plot_pp_data and plot_MC) else 0) + pad2.SetLeftMargin(0.2) + pad2.SetRightMargin(0.04) + pad2.SetTicks(1, 1) + pad2.Draw() + pad2.cd() + + myBlankHisto2 = myBlankHisto.Clone("myBlankHisto_C") + if plot_MC: + if self.is_pp: + myBlankHisto2.SetYTitle("#frac{Data}{%s}" % MC) + else: + myBlankHisto2.SetYTitle("#frac{Pb#minusPb}{%s}" % MC) + elif plot_pp_data: + if self.use_prev_result: + myBlankHisto2.SetYTitle("#frac{5.02 TeV}{2.76 TeV}") + else: + myBlankHisto2.SetYTitle("#frac{Pb#minusPb}{pp}") + myBlankHisto2.GetYaxis().SetTitleSize(20) + myBlankHisto2.GetYaxis().SetTitleFont(43) + myBlankHisto2.GetYaxis().SetTitleOffset(3) + myBlankHisto2.GetYaxis().SetLabelFont(43) + myBlankHisto2.GetYaxis().SetLabelSize(25) + myBlankHisto2.GetYaxis().SetNdivisions(505) + if not plot_pp_data or not plot_MC: + myBlankHisto2.GetYaxis().SetTitleOffset(2.5) + myBlankHisto2.SetXTitle(xtitle) + myBlankHisto2.GetXaxis().SetTitleSize(30) + myBlankHisto2.GetXaxis().SetTitleFont(43) + myBlankHisto2.GetXaxis().SetTitleOffset(2.2) + myBlankHisto2.GetXaxis().SetLabelFont(43) + myBlankHisto2.GetXaxis().SetLabelSize(25) + if plot_ratio_same_scale: + if jetR == 0.2: + myBlankHisto2.GetYaxis().SetRangeUser(0.2, 2.4) + elif jetR == 0.4: + myBlankHisto2.GetYaxis().SetRangeUser(0.5, 1.9) + else: + myBlankHisto2.GetYaxis().SetRangeUser(0, 2.2) + elif jetR == 0.2: + if min_pt_truth == 20: + myBlankHisto2.GetYaxis().SetRangeUser(0.6, 1.75) + elif min_pt_truth == 40: + myBlankHisto2.GetYaxis().SetRangeUser(0.78, 1.299) + elif min_pt_truth == 60: + 
myBlankHisto2.GetYaxis().SetRangeUser(0.55, 1.499) + else: + myBlankHisto2.GetYaxis().SetRangeUser(0.5, 1.99) + elif jetR == 0.4: + if min_pt_truth == 20: + myBlankHisto2.GetYaxis().SetRangeUser(0.81, 1.72) + elif min_pt_truth == 40: + myBlankHisto2.GetYaxis().SetRangeUser(0.7, 2.1) + elif min_pt_truth == 60: + myBlankHisto2.GetYaxis().SetRangeUser(0.75, 1.55) + else: + myBlankHisto2.GetYaxis().SetRangeUser(0.5, 1.99) + else: + myBlankHisto2.GetYaxis().SetRangeUser(0.5, 1.99) + myBlankHisto2.Draw() + + line = ROOT.TLine(0, 1, xmax, 1) + line.SetLineColor(920+2) + line.SetLineStyle(2) + line.Draw() + + # Initialize third pad if need to also plot pp ratio + if plot_pp_data and plot_MC: + c.cd() + pad3 = ROOT.TPad("pad3", "pad3", 0, 0, 1, minpad2y) + pad3.SetTopMargin(0) + pad3.SetBottomMargin(0.4) + pad3.SetLeftMargin(0.2) + pad3.SetRightMargin(0.04) + pad3.SetTicks(1,1) + pad3.Draw() + pad3.cd() + + myBlankHisto3 = myBlankHisto2.Clone("myBlankHisto3") + myBlankHisto3.SetYTitle("#frac{Pb#minusPb}{pp}") + myBlankHisto3.SetXTitle(xtitle) + myBlankHisto3.GetXaxis().SetTitleSize(30) + myBlankHisto3.GetXaxis().SetTitleFont(43) + myBlankHisto3.GetXaxis().SetTitleOffset(3) + myBlankHisto3.GetXaxis().SetLabelFont(43) + myBlankHisto3.GetXaxis().SetLabelSize(25) + myBlankHisto3.GetYaxis().SetTitleSize(20) + myBlankHisto3.GetYaxis().SetTitleFont(43) + myBlankHisto3.GetYaxis().SetTitleOffset(3) + myBlankHisto3.GetYaxis().SetLabelFont(43) + myBlankHisto3.GetYaxis().SetLabelSize(25) + myBlankHisto3.GetYaxis().SetNdivisions(505) + if plot_ratio_same_scale: + if jetR == 0.2: + myBlankHisto3.GetYaxis().SetRangeUser(0.2, 2.4) + elif jetR == 0.4: + myBlankHisto3.GetYaxis().SetRangeUser(0.5, 1.9) + else: + myBlankHisto3.GetYaxis().SetRangeUser(0, 2.2) + elif jetR == 0.2: + if min_pt_truth == 20: + myBlankHisto3.GetYaxis().SetRangeUser(0.6, 1.75) + elif min_pt_truth == 40: + myBlankHisto3.GetYaxis().SetRangeUser(0.78, 1.299) + elif min_pt_truth == 60: + 
myBlankHisto3.GetYaxis().SetRangeUser(0.55, 1.499) + else: + myBlankHisto3.GetYaxis().SetRangeUser(0.5, 1.99) + elif jetR == 0.4: + if min_pt_truth == 20: + myBlankHisto3.GetYaxis().SetRangeUser(0.81, 1.72) + elif min_pt_truth == 40: + myBlankHisto3.GetYaxis().SetRangeUser(0.7, 2.1) + elif min_pt_truth == 60: + myBlankHisto3.GetYaxis().SetRangeUser(0.75, 1.55) + else: + myBlankHisto3.GetYaxis().SetRangeUser(0.5, 1.99) + else: + myBlankHisto3.GetYaxis().SetRangeUser(0.5, 1.99) + myBlankHisto3.Draw() + + line2 = line.Clone("line2") + line2.Draw("same") + + hMC = None; fraction_tagged_MC = None; + h_pp_data = None; h_pp_sys = None; + if plot_MC: + if MC.lower() == "pythia": + if grooming_setting and maxbin: + hMC, fraction_tagged_MC = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin+1, + MC='Pythia', overlay=True) + else: + hMC, fraction_tagged_MC = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin, + MC='Pythia', overlay=True) + + elif MC.lower() == "herwig": + if grooming_setting: + hMC, fraction_tagged_MC = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin+1, + 'Herwig', overlay=True) + else: + hMC, fraction_tagged_MC = self.MC_prediction( + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin, + 'Herwig', overlay=True) + + else: + raise NotImplementedError("MC must be either Pythia or Herwig.") + + plot_errors = False + if plot_errors: + hMC.SetMarkerSize(0) + hMC.SetMarkerStyle(0) + hMC.SetMarkerColor(color) + hMC.SetFillColor(color) + else: + hMC.SetLineColor(color) + hMC.SetLineColorAlpha(color, 0.5) + hMC.SetLineWidth(4) + + if plot_pp_data: + + if self.use_prev_result: + # Use previous Pb-Pb result (use pp name for convenience) + h_pp_data, h_prev_sys_up, h_prev_sys_down = self.get_h_prev_result( + jetR, obs_label, min_pt_truth, max_pt_truth) + # Construct TGraphAsymmErrors to store asymmetric up and down uncertainties + x = array('d', 
[h_prev_sys_up.GetBinCenter(i) for i in \ + range(1, h_prev_sys_up.GetNbinsX()+1)]) + y = array('d', [h_prev_sys_up.GetBinContent(i) for i in \ + range(1, h_prev_sys_up.GetNbinsX()+1)]) + exl = array('d', [x[i-1]-h_prev_sys_up.GetBinLowEdge(i) for i in \ + range(1, h_prev_sys_up.GetNbinsX()+1)]) + eyl = array('d', [h_prev_sys_down.GetBinError(i) for i in \ + range(1, h_prev_sys_down.GetNbinsX()+1)]) + exh = array('d', [h_prev_sys_up.GetBinLowEdge(i)-x[i-2] for i in \ + range(2, h_prev_sys_up.GetNbinsX()+2)]) + eyh = array('d', [h_prev_sys_up.GetBinError(i) for i in \ + range(1, h_prev_sys_up.GetNbinsX()+1)]) + h_pp_sys = ROOT.TGraphAsymmErrors(len(x), x, y, exl, exh, eyl, eyh) + else: + h_pp_data, h_pp_sys = self.get_pp_data( + jetR, obs_label, min_pt_truth, max_pt_truth, + [h.GetBinLowEdge(i) for i in range(1, h.GetNbinsX()+2)]) + + plot_errors = True + if plot_errors: + h_pp_data.SetMarkerSize(1.5) + h_pp_data.SetMarkerStyle(marker_pythia) #27) + h_pp_data.SetMarkerColor(color) + h_pp_data.SetFillColor(color) + h_pp_data.SetLineStyle(9) + h_pp_data.SetLineWidth(2) + h_pp_data.SetLineColor(color) + h_pp_sys.SetLineColor(0) + h_pp_sys.SetFillColor(color) + #h_pp_sys.SetFillColorAlpha(color, 0.8) + h_pp_sys.SetFillStyle(3004) + h_pp_sys.SetLineWidth(0) + else: + h_pp_data.SetLineColor(color) + h_pp_data.SetLineColorAlpha(color, 0.5) + h_pp_data.SetLineWidth(4) + + if plot_ratio: + if self.use_prev_result and plot_pp_data: + # Take ratios to get the correct systematic uncertainties + hRatioSysUp = h_sys.Clone() + hRatioSysUp.SetName('{}_RatioUp'.format(h_sys.GetName())) + hRatioSysUp.Divide(h_prev_sys_up) + hRatioSysDown = h_sys.Clone() + hRatioSysDown.SetName('{}_RatioDown'.format(h_sys.GetName())) + hRatioSysDown.Divide(h_prev_sys_down) + # Construct TGraphAsymmErrors + x = array('d', [hRatioSysUp.GetBinCenter(i) for i in \ + range(1, hRatioSysUp.GetNbinsX()+1)]) + y = array('d', [hRatioSysUp.GetBinContent(i) for i in \ + range(1, hRatioSysUp.GetNbinsX()+1)]) + exl 
= array('d', [x[i-1]-hRatioSysUp.GetBinLowEdge(i) for i in \ + range(1, hRatioSysUp.GetNbinsX()+1)]) + eyl = array('d', [hRatioSysDown.GetBinError(i) for i in \ + range(1, hRatioSysDown.GetNbinsX()+1)]) + exh = array('d', [hRatioSysUp.GetBinLowEdge(i)-x[i-2] for i in \ + range(2, hRatioSysUp.GetNbinsX()+2)]) + eyh = array('d', [hRatioSysUp.GetBinError(i) for i in \ + range(1, hRatioSysUp.GetNbinsX()+1)]) + hRatioSys = ROOT.TGraphAsymmErrors(len(x), x, y, exl, exh, eyl, eyh) + else: + hRatioSys = h_sys.Clone('%s_Ratio' % h_sys.GetName()) + if plot_MC: + hRatioSys.Divide(hMC) + elif plot_pp_data: + hRatioSys.Divide(h_pp_sys) + hRatioSys.SetLineColor(0) + hRatioSys.SetFillColor(color) + hRatioSys.SetFillColorAlpha(color, 0.3) + hRatioSys.SetFillStyle(1001) + hRatioSys.SetLineWidth(0) + hRatioSys.SetMaximum(1.99) + + hRatioStat = h.Clone('%s_Ratio' % h.GetName()) + if plot_MC: + hRatioStat.Divide(hMC) + elif plot_pp_data: + hRatioStat.Divide(h_pp_data) + #for i in range(1, hRatioStat.GetNbinsX()+1): + # new_error = math.sqrt(h.GetBinError(i) ** 2 + h_pp_data.GetBinError(i) ** 2) + # hRatioStat.SetBinError(i, new_error) + hRatioStat.SetMarkerSize(1.5) + hRatioStat.SetMarkerStyle(marker) + hRatioStat.SetMarkerColor(color) + hRatioStat.SetLineStyle(1) + hRatioStat.SetLineWidth(2) + hRatioStat.SetLineColor(color) + hRatioStat.SetMaximum(1.99) + + pad2.cd() + if plot_MC or plot_pp_data: + # TGraphAsymmErrors doesn't have DrawCopy + if self.use_prev_result: + hRatioSys.Draw('E2 same') + hRatioStat.Draw('PE X0 same') + else: + hRatioSys.DrawCopy('E2 same') + hRatioStat.DrawCopy('PE X0 same') + + # If both plot_MC and plot_pp_data, need to do pad3 + if plot_MC and plot_pp_data: + pad3.cd() + + hRatioSys2 = h_sys.Clone('%s_Ratio2' % h_sys.GetName()) + hRatioSys2.Divide(h_pp_sys) + hRatioSys2.SetLineColor(0) + hRatioSys2.SetFillColor(color) + hRatioSys2.SetFillColorAlpha(color, 0.3) + hRatioSys2.SetFillStyle(1001) + hRatioSys2.SetLineWidth(0) + hRatioSys2.SetMaximum(1.99) + 
hRatioSys2.DrawCopy('E2 same') + + hRatioStat2 = h.Clone('%s_Ratio2' % h.GetName()) + hRatioStat2.Divide(h_pp_data) + hRatioStat2.SetMarkerSize(1.5) + hRatioStat2.SetMarkerStyle(marker) + hRatioStat2.SetMarkerColor(color) + hRatioStat2.SetLineStyle(1) + hRatioStat2.SetLineWidth(2) + hRatioStat2.SetLineColor(color) + hRatioStat2.SetMaximum(1.99) + hRatioStat2.DrawCopy('PE X0 same') + + pad1.cd() + if plot_MC: + plot_errors = False + if plot_errors: + hMC.DrawCopy('E3 same') + else: + hMC.DrawCopy('L hist same') + + if plot_pp_data: + plot_errors = True + if plot_errors: + # TGraphAsymmErrors doesn't have DrawCopy + if self.use_prev_result: + h_pp_sys.Draw('E2 same') + h_pp_data.Draw('PE X0 same') + else: + h_pp_sys.DrawCopy('E2 same') + h_pp_data.DrawCopy('PE X0 same') + else: + h_pp_data.DrawCopy('L hist same') + + h_sys.DrawCopy('E2 same') + h.DrawCopy('PE X0 same') + + subobs_label = self.utils.formatted_subobs_label(self.observable) + text = '' + if subobs_label: + text += subobs_label + if obs_setting: + text += ' = ' + str(obs_setting) + text_list.append(text) + h_list.append(h) + + pad1.cd() + for i, h, text in zip(range(len(h_list)), h_list, text_list): + if i < 2: + if single_alpha and plot_ratio and plot_pp_data and plot_MC: + myLegend.AddEntry(h, "0#minus10% Pb#minusPb data") #text + " (girth)", 'pe') + else: + myLegend.AddEntry(h, text, 'pe') + else: + myLegend2.AddEntry(h, text, 'pe') + myLegend.AddEntry(h_sys, 'Pb#minusPb syst. uncert.', 'f') + if plot_pp_data: + if self.use_prev_result: + myLegend.AddEntry(h_pp_data, 'Pb#minusPb @ 2.76 TeV', 'pe') + if plot_errors: + myLegend.AddEntry(h_pp_sys, '2.76 TeV syst. uncert.', 'f') + elif not (plot_MC and single_alpha): + myLegend.AddEntry(h_pp_data, 'pp data', 'pe') + if plot_errors: + myLegend.AddEntry(h_pp_sys, 'pp syst. uncert.', 'f') + else: + myLegend2.AddEntry(h_pp_data, 'pp data', 'pe') + if plot_errors: + myLegend2.AddEntry(h_pp_sys, 'pp syst. 
uncert.', 'f') + if MC.lower() == "pythia": + myLegend2.AddEntry(hMC, 'PYTHIA8 Monash2013', 'l') + elif MC.lower() == "herwig": + myLegend2.AddEntry(hMC, 'Herwig7 Default', 'l') + if plot_MC and not (plot_pp_data and single_alpha): + if MC.lower() == "pythia": + myLegend.AddEntry(hMC, 'PYTHIA8 Monash2013', 'l') + elif MC.lower() == "herwig": + myLegend.AddEntry(hMC, 'Herwig7 Default', 'l') + + text_xval = 0.22 if single_alpha and plot_pp_data and plot_MC else 0.27 + if not plot_ratio: + text_xval = 0.26 + text_yval = 0.85 + delta_y = 0.075 # if single_alpha and plot_pp_data and plot_MC else 0.065 + text_latex = ROOT.TLatex() + text_latex.SetNDC() + text = 'ALICE {}'.format(self.figure_approval_status) + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + #if single_alpha and plot_ratio and plot_pp_data and plot_MC: + text = '#sqrt{#it{s}_{NN}} = 5.02 TeV' + #else: + # text = '0#minus10% Pb-Pb #sqrt{#it{s}_{NN}} = 5.02 TeV' + text_latex.SetTextSize(0.055) + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + text = 'Ch.-particle anti-#it{k}_{T} jets' + text_latex.SetTextSize(0.055) + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + text = '#it{R} = ' + str(jetR) + ', | #it{#eta}_{jet}| < %s' % str(0.9 - jetR) + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + text = str(min_pt_truth) + ' < #it{p}_{T}^{ch jet} < ' + str(max_pt_truth) + ' GeV/#it{c}' + text_latex.SetTextSize(0.055) + text_latex.DrawLatex(text_xval, text_yval, text) + text_yval -= delta_y + + if grooming_setting: + text = self.utils.formatted_grooming_label(grooming_setting) #.replace("#beta}", "#beta}_{SD}") + text_latex.DrawLatex(text_xval, text_yval - 0.005, text) + text_yval -= delta_y + 0.005 + + if not (single_alpha and plot_ratio and plot_pp_data and plot_MC): + text = "0#minus10% Pb#minusPb data" + xmin = legend_xmin+0.12 if (plot_ratio and plot_pp_data and plot_MC) else legend_xmin+0.09 + if 
not plot_ratio: + xmin -= 0.02 + text_latex.DrawLatex(xmin, legend_ymax+0.02, text) + + myLegend.Draw() + if len(h_list) > 2 or (single_alpha and plot_MC and plot_pp_data): + myLegend2.Draw() + + name = 'h_{}_R{}_{}-{}_{}{}'.format(self.observable, + self.utils.remove_periods(jetR), int(min_pt_truth), + int(max_pt_truth), i_config, self.file_format) + if plot_MC: + if plot_pp_data: + name = 'h_{}_R{}_{}-{}_ppComp+{}_{}{}'.format( + self.observable, self.utils.remove_periods(jetR), int(min_pt_truth), + int(max_pt_truth), MC, i_config, self.file_format) + else: + name = 'h_{}_R{}_{}-{}_{}_{}{}'.format( + self.observable, self.utils.remove_periods(jetR), int(min_pt_truth), + int(max_pt_truth), MC, i_config, self.file_format) + elif plot_pp_data: + name = 'h_{}_R{}_{}-{}_ppComp_{}{}'.format(self.observable, self.utils.remove_periods(jetR), + int(min_pt_truth), int(max_pt_truth), + i_config, self.file_format) + + output_dir = getattr(self, 'output_dir_final_results') + if not os.path.exists(os.path.join(output_dir, 'all_results')): + os.mkdir(os.path.join(output_dir, 'all_results')) + outputFilename = os.path.join(output_dir, 'all_results', name) + c.SaveAs(outputFilename) + + # Write result to ROOT file + final_result_root_filename = os.path.join(output_dir, 'fFinalResults.root') + fFinalResults = ROOT.TFile(final_result_root_filename, 'UPDATE') + c.Write() + + c.Close() + + #---------------------------------------------------------------------- + # Return maximum & minimum y-values of unfolded results in a subconfig list + def get_max_min(self, name, overlay_list, maxbins): + + total_min = 1e10 + total_max = -1e10 + + for i, subconfig_name in enumerate(self.obs_subconfig_list): + + if subconfig_name not in overlay_list: + continue + + obs_setting = self.obs_settings[i] + grooming_setting = self.grooming_settings[i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + maxbin = maxbins[i] + + h = getattr(self, name.format(obs_label)) + minbin = 1 + 
maxbin_adj = maxbin if (maxbin != None) else h.GetNbinsX() + if 'SD' in obs_label: + minbin += 1 + if maxbin != None: + maxbin_adj += 1 + content = [ h.GetBinContent(j) for j in range(minbin, maxbin_adj+1) ] + + min_val = min(content) + if min_val < total_min: + total_min = min_val + max_val = max(content) + if max_val > total_max: + total_max = max_val + + + return (total_max, total_min) + + +#---------------------------------------------------------------------- +if __name__ == '__main__': + + # Define arguments + parser = argparse.ArgumentParser(description='Jet substructure analysis') + parser.add_argument('-c', '--configFile', action='store', + type=str, metavar='configFile', + default='analysis_config.yaml', + help='Path of config file for analysis') + + # Parse the arguments + args = parser.parse_args() + + print('Configuring...') + print('configFile: \'{0}\''.format(args.configFile)) + + # If invalid configFile is given, exit + if not os.path.exists(args.configFile): + print('File \"{0}\" does not exist! 
Exiting!'.format(args.configFile)) + sys.exit(0) + + analysis = RunAnalysisAng(config_file = args.configFile) + analysis.run_analysis() diff --git a/pyjetty/alice_analysis/analysis/user/ang_pp/copy_bins.py b/pyjetty/alice_analysis/analysis/user/ang/pp/copy_bins.py similarity index 100% rename from pyjetty/alice_analysis/analysis/user/ang_pp/copy_bins.py rename to pyjetty/alice_analysis/analysis/user/ang/pp/copy_bins.py diff --git a/pyjetty/alice_analysis/analysis/user/ang_pp/plot_angularity_theory_figures.py b/pyjetty/alice_analysis/analysis/user/ang/pp/plot_angularity_theory_figures.py similarity index 87% rename from pyjetty/alice_analysis/analysis/user/ang_pp/plot_angularity_theory_figures.py rename to pyjetty/alice_analysis/analysis/user/ang/pp/plot_angularity_theory_figures.py index a7b42b6b3..7f48ec536 100644 --- a/pyjetty/alice_analysis/analysis/user/ang_pp/plot_angularity_theory_figures.py +++ b/pyjetty/alice_analysis/analysis/user/ang/pp/plot_angularity_theory_figures.py @@ -40,7 +40,7 @@ def __init__(self, input_dir='', output_dir='', **kwargs): #self.base_dir = '/home/james/plot-angularity/' self.base_dir = input_dir self.file = 'ang/final_results/fFinalResults.root' - self.beta_list = [1.5, 2, 3] + self.alpha_list = [1.5, 2, 3] self.R_list = [0.2] self.Omega_list = [0.2, 0.4, 0.8, 2] self.pt_list = [20, 40, 60, 80, 100] @@ -65,8 +65,8 @@ def __init__(self, input_dir='', output_dir='', **kwargs): self.ymax_ratio = 15 # R=0.4 scale factors - self.scale_factor_ungroomed_R04_beta2 = 0.7 - self.scale_factor_ungroomed_R04_beta3 = 0.25 + self.scale_factor_ungroomed_R04_alpha2 = 0.7 + self.scale_factor_ungroomed_R04_alpha3 = 0.25 self.xtitle = '#it{#lambda}_{#it{#alpha}}' self.ytitle = '#frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}} ' + \ @@ -120,11 +120,11 @@ def __init__(self, input_dir='', output_dir='', **kwargs): for R in self.R_list: self.lambda_np[min_pt][R] = self.Lambda / (formula_pt * R) self.lambda_np_groomed[min_pt][R] = {} - for beta in 
self.beta_list: - self.lambda_np_groomed[min_pt][R][str(beta)] = ( - self.Lambda / (formula_pt * R))**beta * self.sd_zcut**(1 - beta) - #(self.Lambda / formula_pt)**(beta) * (R**(self.sd_beta) / self.sd_zcut) \ - #**(beta - 1) )**(1 / (1 + self.sd_beta)) / R**(beta) + for alpha in self.alpha_list: + self.lambda_np_groomed[min_pt][R][str(alpha)] = ( + self.Lambda / (formula_pt * R))**alpha * self.sd_zcut**(1 - alpha) + #(self.Lambda / formula_pt)**(alpha) * (R**(self.sd_beta) / self.sd_zcut) \ + #**(alpha - 1) )**(1 / (1 + self.sd_beta)) / R**(alpha) #------------------------------------------------------ @@ -181,11 +181,11 @@ def plot_multipanel(self, R=1, min_pt=60, max_pt=80, groomed=False, Fnp=False): self.g_ratio_dict = {'1.5': [], '2': [], '3': []} # Plot each pt bin in its own pad - self.plot_angularity(c, pad=1, R=R, beta = '1.5', + self.plot_angularity(c, pad=1, R=R, alpha = '1.5', min_pt=min_pt, max_pt=max_pt, groomed=groomed, Fnp=Fnp) - self.plot_angularity(c, pad=2, R=R, beta = '2', + self.plot_angularity(c, pad=2, R=R, alpha = '2', min_pt=min_pt, max_pt=max_pt, groomed=groomed, Fnp=Fnp) - self.plot_angularity(c, pad=3, R=R, beta = '3', + self.plot_angularity(c, pad=3, R=R, alpha = '3', min_pt=min_pt, max_pt=max_pt, groomed=groomed, Fnp=Fnp) outfilename = "hJetAngularity_Theory_%s_PtBin%i-%i" % (str(R), min_pt, max_pt) @@ -198,9 +198,9 @@ def plot_multipanel(self, R=1, min_pt=60, max_pt=80, groomed=False, Fnp=False): c.SaveAs(output_filename) #------------------------------------------------------------------------------------------- - # Get beta histograms from file, and call plot_beta_overlay to draw them + # Get alpha histograms from file, and call plot_alpha_overlay to draw them #------------------------------------------------------------------------------------------- - def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, + def plot_angularity(self, c, pad=0, R=1, alpha = '', min_pt=60, max_pt=80, groomed=False, Fnp=False): 
filename = self.predictions[min_pt][str(R)] @@ -216,9 +216,9 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, # Get data hist h_name ='hmain_ang_R{}_{}{}_{}-{}_trunc'.format( - R, beta, grooming_label, min_pt, max_pt) + R, alpha, grooming_label, min_pt, max_pt) h_sys_name = 'hResult_ang_systotal_R{}_{}{}_n3_{}-{}'.format( - R, beta, grooming_label, min_pt, max_pt) + R, alpha, grooming_label, min_pt, max_pt) self.h = f.Get(h_name) self.h_sys = f.Get(h_sys_name) self.h.SetDirectory(0) @@ -233,8 +233,8 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, n = self.h.GetNbinsX() min_bin = None if groomed: - min_bin = self.h.FindBin(self.lambda_np_groomed[min_pt][R][beta]) + 1 - #if self.h.GetBinCenter(min_bin) <= self.lambda_np_groomed[min_pt][R][beta]: + min_bin = self.h.FindBin(self.lambda_np_groomed[min_pt][R][alpha]) + 1 + #if self.h.GetBinCenter(min_bin) <= self.lambda_np_groomed[min_pt][R][alpha]: # min_bin += 1 else: min_bin = self.h.FindBin(self.lambda_np[min_pt][R]) + 1 @@ -246,8 +246,8 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, self.h.Scale(1./integral_perturbative) self.h_sys.Scale(1./integral_perturbative) - # Set ymax corresponding to the beta=1.5 case - if beta == '1.5': + # Set ymax corresponding to the alpha=1.5 case + if alpha == '1.5': if R == 0.2: if min_pt == 20: self.ymax = 1.9 * self.h.GetMaximum() @@ -280,33 +280,33 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, self.ymax = 1.9 * self.h.GetMaximum() self.scale_label = self.scale_histogram_for_visualization( - self.h_sys, R, beta, min_pt, groomed) - self.scale_histogram_for_visualization(self.h, R, beta, min_pt, groomed) + self.h_sys, R, alpha, min_pt, groomed) + self.scale_histogram_for_visualization(self.h, R, alpha, min_pt, groomed) # Get folded theory predictions iterlist = self.folding_labels if not Fnp else self.Omega_list for i, folding_label in enumerate(iterlist): - 
beta_label = beta.replace('.', '') + alpha_label = alpha.replace('.', '') if groomed: - beta_label += '_SD_zcut02_B0' + alpha_label += '_SD_zcut02_B0' name_cent = None; name_min = None; name_max = None if Fnp: name_cent = 'theory_cent_ang_R{}_{}_ch_Fnp_Omega{}_PtBin{}-{}_0'.format( - self.remove_periods(str(R)), beta_label, folding_label, min_pt, max_pt) + self.remove_periods(str(R)), alpha_label, folding_label, min_pt, max_pt) name_min = 'theory_min_ang_R{}_{}_ch_Fnp_Omega{}_PtBin{}-{}_0'.format( - self.remove_periods(str(R)), beta_label, folding_label, min_pt, max_pt) + self.remove_periods(str(R)), alpha_label, folding_label, min_pt, max_pt) name_max = 'theory_max_ang_R{}_{}_ch_Fnp_Omega{}_PtBin{}-{}_0'.format( - self.remove_periods(str(R)), beta_label, folding_label, min_pt, max_pt) + self.remove_periods(str(R)), alpha_label, folding_label, min_pt, max_pt) else: name_cent = 'theory_cent_ang_R{}_{}_ch_PtBin{}-{}_{}'.format( - self.remove_periods(str(R)), beta_label, min_pt, max_pt, i) + self.remove_periods(str(R)), alpha_label, min_pt, max_pt, i) name_min = 'theory_min_ang_R{}_{}_ch_PtBin{}-{}_{}'.format( - self.remove_periods(str(R)), beta_label, min_pt, max_pt, i) + self.remove_periods(str(R)), alpha_label, min_pt, max_pt, i) name_max = 'theory_max_ang_R{}_{}_ch_PtBin{}-{}_{}'.format( - self.remove_periods(str(R)), beta_label, min_pt, max_pt, i) + self.remove_periods(str(R)), alpha_label, min_pt, max_pt, i) h_theory_cent = f.Get(name_cent) h_theory_min = f.Get(name_min) @@ -328,8 +328,8 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, # Normalize such that integral in perturbative region is 1 min_bin = None if groomed: - min_bin = h_theory_cent_rebinned.FindBin(self.lambda_np_groomed[min_pt][R][beta]) + 1 - #if self.h.GetBinCenter(min_bin) <= self.lambda_np_groomed[min_pt][R][beta]: + min_bin = h_theory_cent_rebinned.FindBin(self.lambda_np_groomed[min_pt][R][alpha]) + 1 + #if self.h.GetBinCenter(min_bin) <= 
self.lambda_np_groomed[min_pt][R][alpha]: # min_bin += 1 else: min_bin = h_theory_cent_rebinned.FindBin(self.lambda_np[min_pt][R]) + 1 @@ -346,9 +346,9 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, h_theory_max_rebinned.Scale(1./integral_perturbative_theory) # Scale additionally for visualization - self.scale_histogram_for_visualization(h_theory_cent_rebinned, R, beta, min_pt, groomed) - self.scale_histogram_for_visualization(h_theory_min_rebinned, R, beta, min_pt, groomed) - self.scale_histogram_for_visualization(h_theory_max_rebinned, R, beta, min_pt, groomed) + self.scale_histogram_for_visualization(h_theory_cent_rebinned, R, alpha, min_pt, groomed) + self.scale_histogram_for_visualization(h_theory_min_rebinned, R, alpha, min_pt, groomed) + self.scale_histogram_for_visualization(h_theory_max_rebinned, R, alpha, min_pt, groomed) x = np.array([h_theory_cent_rebinned.GetXaxis().GetBinCenter(i) for i in range(1, n+1)]) y = np.array([h_theory_cent_rebinned.GetBinContent(i) for i in range(1, n+1)]) @@ -356,21 +356,21 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, yerrup = np.array([h_theory_max_rebinned.GetBinContent(i)-y[i-1] for i in range(1, n+1)]) yerrdn = np.array([y[i-1]-h_theory_min_rebinned.GetBinContent(i) for i in range(1, n+1)]) g_theory = ROOT.TGraphAsymmErrors(n, x, y, xerrdn, xerrup, yerrdn, yerrup) - g_theory_name = 'g_theory_%i_%s' % (i, str(beta)) + g_theory_name = 'g_theory_%i_%s' % (i, str(alpha)) if Fnp: g_theory_name += '_Fnp' g_theory.SetNameTitle(g_theory_name, g_theory_name) - self.g_theory_dict[beta].append(g_theory) + self.g_theory_dict[alpha].append(g_theory) # Construct ratios in self.h_ratio_dict, self.g_ratio_dict self.construct_ratio(self.h, self.h_sys, h_theory_cent_rebinned, h_theory_min_rebinned, h_theory_max_rebinned, n, x, xerrup, xerrdn, y, yerrup, yerrdn, - R, beta, i, pad) + R, alpha, i, pad) f.Close() - # Plot overlay of beta values - self.plot_beta_overlay(c, pad, R, 
beta, min_pt, max_pt, groomed, Fnp) + # Plot overlay of alpha values + self.plot_alpha_overlay(c, pad, R, alpha, min_pt, max_pt, groomed, Fnp) # Keep histograms in memory self.plot_list.append(self.h) @@ -381,9 +381,9 @@ def plot_angularity(self, c, pad=0, R=1, beta = '', min_pt=60, max_pt=80, self.plot_list.append(self.blank_histo_list) #------------------------------------------------------------------------------------------- - # Draw beta histograms in given pad + # Draw alpha histograms in given pad #------------------------------------------------------------------------------------------- - def plot_beta_overlay(self, c, pad, R, beta, min_pt, max_pt, groomed, Fnp): + def plot_alpha_overlay(self, c, pad, R, alpha, min_pt, max_pt, groomed, Fnp): if groomed: self.logy = True @@ -525,8 +525,8 @@ def plot_beta_overlay(self, c, pad, R, beta, min_pt, max_pt, groomed, Fnp): self.plot_list.append(line_lambda_np) else: # groomed case line_lambda_np_groomed = ROOT.TLine( - self.lambda_np_groomed[min_pt][R][beta], 0, - self.lambda_np_groomed[min_pt][R][beta], + self.lambda_np_groomed[min_pt][R][alpha], 0, + self.lambda_np_groomed[min_pt][R][alpha], self.ymax*2 if self.logy else self.ymax*0.6) line_lambda_np_groomed.SetLineColor(self.colors[-1])#51) line_lambda_np_groomed.SetLineStyle(2) @@ -554,16 +554,16 @@ def plot_beta_overlay(self, c, pad, R, beta, min_pt, max_pt, groomed, Fnp): iterlist = self.folding_labels if not Fnp else self.Omega_list for i, folding_label in enumerate(iterlist): - self.g_theory_dict[beta][i].SetFillColorAlpha(self.colors[i], 0.25) - self.g_theory_dict[beta][i].SetLineColor(self.colors[i]) - self.g_theory_dict[beta][i].SetLineWidth(3) - self.g_theory_dict[beta][i].Draw('L 3 same') + self.g_theory_dict[alpha][i].SetFillColorAlpha(self.colors[i], 0.25) + self.g_theory_dict[alpha][i].SetLineColor(self.colors[i]) + self.g_theory_dict[alpha][i].SetLineWidth(3) + self.g_theory_dict[alpha][i].Draw('L 3 same') if Fnp: - 
leg.AddEntry(self.g_theory_dict[beta][i], + leg.AddEntry(self.g_theory_dict[alpha][i], 'NLL\' #otimes F_{NP}^{#Omega=%.1f} #otimes PYTHIA8' % folding_label, 'lf') else: - leg.AddEntry(self.g_theory_dict[beta][i], 'NLL\' #otimes '+folding_label, 'lf') + leg.AddEntry(self.g_theory_dict[alpha][i], 'NLL\' #otimes '+folding_label, 'lf') self.h_sys.Draw('E2 same') if not groomed: @@ -641,13 +641,13 @@ def plot_beta_overlay(self, c, pad, R, beta, min_pt, max_pt, groomed, Fnp): if pad == 2: system5y = ymax-(2+len(self.Omega_list))*dy system5 = ROOT.TLatex(x+0.2 if pad in [2,3] else x+0.25, system5y, - '#it{{#alpha}} = {}{}'.format(beta,self.scale_label)) + '#it{{#alpha}} = {}{}'.format(alpha,self.scale_label)) system5.SetNDC() if pad in [1]: - beta_size = size / 1.3 + alpha_size = size / 1.3 else: - beta_size = size - system5.SetTextSize(beta_size) + alpha_size = size + system5.SetTextSize(alpha_size) system5.Draw() self.plot_list.append(system5) @@ -711,20 +711,20 @@ def plot_beta_overlay(self, c, pad, R, beta, min_pt, max_pt, groomed, Fnp): for i, folding_label in enumerate(iterlist): # Draw tgraph with sys uncertainty - self.g_ratio_dict[beta][i].SetFillColorAlpha(self.colors[i], 0.25) - self.g_ratio_dict[beta][i].SetLineColor(self.colors[i]) - self.g_ratio_dict[beta][i].SetLineWidth(3) - self.g_ratio_dict[beta][i].Draw('3 same') + self.g_ratio_dict[alpha][i].SetFillColorAlpha(self.colors[i], 0.25) + self.g_ratio_dict[alpha][i].SetLineColor(self.colors[i]) + self.g_ratio_dict[alpha][i].SetLineWidth(3) + self.g_ratio_dict[alpha][i].Draw('3 same') # Draw th1 with stat uncertainty - self.h_ratio_dict[beta][i].SetMarkerColorAlpha(self.colors[i], self.alpha) - self.h_ratio_dict[beta][i].SetLineColorAlpha(self.colors[i], self.alpha) - self.h_ratio_dict[beta][i].SetFillColorAlpha(self.colors[i], self.alpha) - self.h_ratio_dict[beta][i].SetLineColor(self.colors[i]) - self.h_ratio_dict[beta][i].SetLineWidth(2) - self.h_ratio_dict[beta][i].SetMarkerStyle(self.markers[i]) - 
self.h_ratio_dict[beta][i].SetMarkerSize(self.marker_size) - self.h_ratio_dict[beta][i].Draw('PE same') + self.h_ratio_dict[alpha][i].SetMarkerColorAlpha(self.colors[i], self.alpha) + self.h_ratio_dict[alpha][i].SetLineColorAlpha(self.colors[i], self.alpha) + self.h_ratio_dict[alpha][i].SetFillColorAlpha(self.colors[i], self.alpha) + self.h_ratio_dict[alpha][i].SetLineColor(self.colors[i]) + self.h_ratio_dict[alpha][i].SetLineWidth(2) + self.h_ratio_dict[alpha][i].SetMarkerStyle(self.markers[i]) + self.h_ratio_dict[alpha][i].SetMarkerSize(self.marker_size) + self.h_ratio_dict[alpha][i].Draw('PE same') line_lambda_np_ratio = None; line_lambda_np_gr_ratio = None; if not groomed: @@ -738,8 +738,8 @@ def plot_beta_overlay(self, c, pad, R, beta, min_pt, max_pt, groomed, Fnp): self.plot_list.append(line_lambda_np_ratio) else: # groomed case line_lambda_np_gr_ratio = ROOT.TLine( - self.lambda_np_groomed[min_pt][R][beta], self.ymin_ratio, - self.lambda_np_groomed[min_pt][R][beta], self.ymax_ratio) + self.lambda_np_groomed[min_pt][R][alpha], self.ymin_ratio, + self.lambda_np_groomed[min_pt][R][alpha], self.ymax_ratio) line_lambda_np_gr_ratio.SetLineColor(self.colors[-1])#51) line_lambda_np_gr_ratio.SetLineStyle(2) line_lambda_np_gr_ratio.SetLineWidth(2) @@ -753,15 +753,15 @@ def plot_beta_overlay(self, c, pad, R, beta, min_pt, max_pt, groomed, Fnp): # - self.g_theory_dict with tgraph of ratio with sys uncertainties #------------------------------------------------------------------------------------------- def construct_ratio(self, h, h_sys, h_theory_cent, h_theory_min, h_theory_max, n, x, - xerrup, xerrdn, y, yerrup, yerrdn, R, beta, i, pad): + xerrup, xerrdn, y, yerrup, yerrdn, R, alpha, i, pad): # Construct central value h_ratio = h.Clone() - h_ratio.SetName('{}_{}_{}_{}'.format(h_ratio.GetName(), R, beta, pad)) + h_ratio.SetName('{}_{}_{}_{}'.format(h_ratio.GetName(), R, alpha, pad)) h_ratio.SetDirectory(0) h_ratio.Divide(h_theory_cent) self.plot_list.append(h_ratio) - 
self.h_ratio_dict[beta].append(h_ratio) + self.h_ratio_dict[alpha].append(h_ratio) y_ratio = np.array([h_ratio.GetBinContent(i) for i in range(1, n+1)]) # Construct systematic uncertainties: combine data and theory uncertainties @@ -787,8 +787,8 @@ def construct_ratio(self, h, h_sys, h_theory_cent, h_theory_min, h_theory_max, n # Note: invert direction of asymmetric uncertainty g_ratio = ROOT.TGraphAsymmErrors(n, x, y_ratio, xerrdn, xerrup, y_sys_total_up, y_sys_total_dn) - g_ratio.SetName('g_ratio_{}_{}'.format(i, beta)) - self.g_ratio_dict[beta].append(g_ratio) + g_ratio.SetName('g_ratio_{}_{}'.format(i, alpha)) + self.g_ratio_dict[alpha].append(g_ratio) #------------------------------------------------------------------------------------------- # Rebin theory histogram according to data binning @@ -832,58 +832,58 @@ def remove_negative_bin_edges(self, h): #------------------------------------------------------------------------------------------- # Scale vertical amplitude of histogram, for visualization #------------------------------------------------------------------------------------------- - def scale_histogram_for_visualization(self, h, R, beta, min_pt, groomed): + def scale_histogram_for_visualization(self, h, R, alpha, min_pt, groomed): scale_factor = 1. 
if groomed: if R == 0.2: if min_pt == 20: - if beta == '3': + if alpha == '3': scale_factor = 0.1 #elif min_pt == 40: #elif R == 0.4: else: if R == 0.2: if min_pt == 20: - if beta == '2': + if alpha == '2': scale_factor = 0.25 - elif beta == '3': + elif alpha == '3': scale_factor = 0.02 elif min_pt == 40: - if beta == '2': + if alpha == '2': scale_factor = 0.45 - elif beta == '3': + elif alpha == '3': scale_factor = 0.06 elif min_pt == 60: - if beta == '2': + if alpha == '2': scale_factor = 0.3 - elif beta == '3': + elif alpha == '3': scale_factor = 0.12 elif min_pt == 80: - if beta == '2': + if alpha == '2': scale_factor = 0.42 - if beta == '3': + if alpha == '3': scale_factor = 0.035 elif R == 0.4: if min_pt == 20: - if beta == '2': + if alpha == '2': scale_factor = 0.45 - elif beta == '3': + elif alpha == '3': scale_factor = 0.2 elif min_pt == 40: - if beta == '2': + if alpha == '2': scale_factor = 0.45 - elif beta == '3': + elif alpha == '3': scale_factor = 0.25 elif min_pt == 60: - if beta == '2': + if alpha == '2': scale_factor = 0.65 - elif beta == '3': + elif alpha == '3': scale_factor = 0.27 elif min_pt == 80: - if beta == '2': + if alpha == '2': scale_factor = 0.33 - elif beta == '3': + elif alpha == '3': scale_factor = 0.15 h.Scale(scale_factor) diff --git a/pyjetty/alice_analysis/analysis/user/ang_pp/run_analysis_ang.py b/pyjetty/alice_analysis/analysis/user/ang/pp/run_analysis_ang.py similarity index 93% rename from pyjetty/alice_analysis/analysis/user/ang_pp/run_analysis_ang.py rename to pyjetty/alice_analysis/analysis/user/ang/pp/run_analysis_ang.py index 9cc9a1762..277d11652 100755 --- a/pyjetty/alice_analysis/analysis/user/ang_pp/run_analysis_ang.py +++ b/pyjetty/alice_analysis/analysis/user/ang/pp/run_analysis_ang.py @@ -183,7 +183,7 @@ def initialize_user_config(self): print('is_pp: {}'.format(self.is_pp)) # Whether or not to use the previous preliminary result in final plots - self.use_prev_prelim = config['use_prev_prelim'] + 
self.use_prev_prelim = config['use_prev_prelim'] if 'use_prev_prelim' in config else False self.histutils = ROOT.RUtil.HistUtils() @@ -203,7 +203,7 @@ def initialize_user_config(self): if self.do_theory: self.theory_dir = config['theory_dir'] - self.theory_beta = config['theory_beta'] + self.theory_alpha = config['theory_alpha'] self.theory_pt_bins = config['theory_pt_bins'] self.theory_pt_bins_center = [(self.theory_pt_bins[i] + self.theory_pt_bins[i+1]) / 2 for \ i in range(len(self.theory_pt_bins)-1)] @@ -241,6 +241,7 @@ def initialize_user_config(self): else: self.do_theory = False + self.do_theory_F_np = False if self.do_theory: self.load_pt_scale_factors(self.theory_pt_scale_factors_filepath) @@ -271,8 +272,8 @@ def load_theory_response(self): raise NotImplementedError("Not implemented for more than one grooming setting.") for jetR in self.jetR_list: - for beta in self.theory_beta: - label = "R%s_%s" % (str(jetR).replace('.', ''), str(beta).replace('.', '')) + for alpha in self.theory_alpha: + label = "R%s_%s" % (str(jetR).replace('.', ''), str(alpha).replace('.', '')) if gs: label_gr = label + '_' + gl @@ -344,9 +345,9 @@ def load_theory_response(self): else: # Generated theory folding matrix needs rebinning # Response axes: ['p_{T}^{ch jet}', 'p_{T}^{jet, parton}', - # '#lambda_{#beta}^{ch}', '#lambda_{#beta}^{parton}'] + # '#lambda_{#alpha}^{ch}', '#lambda_{#alpha}^{parton}'] # as compared to the usual - # ['p_{T,det}', 'p_{T,truth}', '#lambda_{#beta,det}', '#lambda_{#beta,truth}'] + # ['p_{T,det}', 'p_{T,truth}', '#lambda_{#alpha,det}', '#lambda_{#alpha,truth}'] det_pt_bin_array = array('d', self.theory_pt_bins) tru_pt_bin_array = det_pt_bin_array obs_bins = array('d', self.theory_obs_bins) @@ -491,10 +492,10 @@ def load_theory_histograms(self): if self.do_theory_F_np: obs_bins_Fnp_gr = obs_bins_Fnp - # Create histogram for each value of R and beta + # Create histogram for each value of R and alpha for jetR in self.jetR_list: - for beta in 
self.theory_beta: # beta value - label = "R%s_%s" % (str(jetR).replace('.', ''), str(beta).replace('.', '')) + for alpha in self.theory_alpha: # alpha value + label = "R%s_%s" % (str(jetR).replace('.', ''), str(alpha).replace('.', '')) if gs: label_gr = label + '_' + gl @@ -576,17 +577,17 @@ def load_theory_histograms(self): if not self.use_old: th_dir = os.path.join( self.theory_dir, "ungr_ALICE_R%s" % str(jetR).replace('.', ''), - "beta%s" % str(beta).replace('.', 'p'), "pT%s_%s" % (pt_min, pt_max)) + "alpha%s" % str(alpha).replace('.', 'p'), "pT%s_%s" % (pt_min, pt_max)) else: th_dir = os.path.join( self.theory_dir, "old", "R%s" % str(jetR).replace('.', ''), - "pT%s_%s" % (pt_min, pt_max), "beta%s" % str(beta).replace('.', 'p')) + "pT%s_%s" % (pt_min, pt_max), "alpha%s" % str(alpha).replace('.', 'p')) th_dir_gr = None if gs: # != None: th_dir_gr = os.path.join( self.theory_dir, "gr_ALICE_R%s" % str(jetR).replace('.', ''), - "beta%s" % str(beta).replace('.', 'p'), "pT%s_%s" % (pt_min, pt_max)) + "alpha%s" % str(alpha).replace('.', 'p'), "pT%s_%s" % (pt_min, pt_max)) val_li = None; val_li_gr = None; val_li_Fnp = None; val_li_Fnp_gr = None if self.exp_test: @@ -680,12 +681,12 @@ def load_theory_histograms(self): if disable_tagging_fraction: missed_tagging_fraction = 0 elif missed_tagging_fraction < 0: - print("WARNING: missed tagging fraction %f < 0 (\\beta = %s, R = %s)." % \ - (missed_tagging_fraction, beta, jetR), "Manually setting to 0.") + print("WARNING: missed tagging fraction %f < 0 (\\alpha = %s, R = %s)." % \ + (missed_tagging_fraction, alpha, jetR), "Manually setting to 0.") missed_tagging_fraction = 0 elif missed_tagging_fraction > 1: - print("WARNING: missed tagging fraction %f > 1 (\\beta = %s, R = %s)." % \ - (missed_tagging_fraction, beta, jetR), "Manually setting to 0.") + print("WARNING: missed tagging fraction %f > 1 (\\alpha = %s, R = %s)." 
% \ + (missed_tagging_fraction, alpha, jetR), "Manually setting to 0.") missed_tagging_fraction = 0 if missed_tagging_fraction == 0: @@ -766,26 +767,26 @@ def load_theory_histograms(self): # Fold from parton to CH level and scale by MPI print("Folding theory predictions...") - self.fold_theory(jetR, beta, parton_hists, scale_req) + self.fold_theory(jetR, alpha, parton_hists, scale_req) if gs: print("Folding theory predictions with %s..." % gl.replace('_', ' ')) - self.fold_theory(jetR, beta, parton_hists_gr, scale_req, gl) + self.fold_theory(jetR, alpha, parton_hists_gr, scale_req, gl) # Also do NP shape function predictions + fold from H to CH level if self.do_theory_F_np: print("Applying NP shape function...") - self.apply_np_shape_fn(jetR, beta, parton_hists_Fnp, scale_req) + self.apply_np_shape_fn(jetR, alpha, parton_hists_Fnp, scale_req) if gs: print("Applying NP shape function with %s..." % gl.replace('_', ' ')) - self.apply_np_shape_fn(jetR, beta, parton_hists_Fnp_gr, scale_req, gl) + self.apply_np_shape_fn(jetR, alpha, parton_hists_Fnp_gr, scale_req, gl) #---------------------------------------------------------------------- # Fold theoretical predictions #---------------------------------------------------------------------- - def fold_theory(self, jetR, beta, parton_hists, scale_req, grooming_label=None): + def fold_theory(self, jetR, alpha, parton_hists, scale_req, grooming_label=None): - label = "R%s_%s" % (str(jetR).replace('.', ''), str(beta).replace('.', '')) + label = "R%s_%s" % (str(jetR).replace('.', ''), str(alpha).replace('.', '')) if grooming_label: label += '_' + grooming_label @@ -829,17 +830,17 @@ def fold_theory(self, jetR, beta, parton_hists, scale_req, grooming_label=None): else: printstring += "..." 
print(printstring) - self.mpi_scale_theory(jetR, beta, ri, response, folded_ch_hists, folded_h_hists, + self.mpi_scale_theory(jetR, alpha, ri, response, folded_ch_hists, folded_h_hists, scale_req, grooming_label) #---------------------------------------------------------------------- # Fold theoretical predictions #---------------------------------------------------------------------- - def mpi_scale_theory(self, jetR, beta, ri, response, folded_ch_hists, folded_h_hists, + def mpi_scale_theory(self, jetR, alpha, ri, response, folded_ch_hists, folded_h_hists, scale_req, grooming_label=None): - label = "R%s_%s" % (str(jetR).replace('.', ''), str(beta).replace('.', '')) + label = "R%s_%s" % (str(jetR).replace('.', ''), str(alpha).replace('.', '')) using_sd_grooming = False if grooming_label: label += '_' + grooming_label @@ -1053,9 +1054,9 @@ def mpi_scale_theory(self, jetR, beta, ri, response, folded_ch_hists, folded_h_h #--------------------------------------------------------------- # Apply NP corrections via shape function (includes MPI & hadronization) #--------------------------------------------------------------- - def apply_np_shape_fn(self, jetR, beta, parton_hists, scale_req, gl=None): + def apply_np_shape_fn(self, jetR, alpha, parton_hists, scale_req, gl=None): - label = "R%s_%s" % (str(jetR).replace('.', ''), str(beta).replace('.', '')) + label = "R%s_%s" % (str(jetR).replace('.', ''), str(alpha).replace('.', '')) grooming = False if gl: label += '_' + gl @@ -1130,7 +1131,7 @@ def apply_np_shape_fn(self, jetR, beta, parton_hists, scale_req, gl=None): pTs = [self.pt_avg_jetR(self.theory_pt_bins[i], self.theory_pt_bins[i+1], jetR) for i in range(0, len(self.theory_pt_bins) - 1)] h_np = self.histutils.convolve_F_np( - Omega, jetR, beta, array('d', obs_bins_Fnp), + Omega, jetR, alpha, array('d', obs_bins_Fnp), len(obs_bins_center_Fnp), array('d', obs_bins_center_Fnp), array('d', obs_bins_width_Fnp), array('d', self.theory_pt_bins), 
len(self.theory_pt_bins_center), @@ -1272,7 +1273,7 @@ def plot_all_results(self, jetR): for i_config, overlay_list in enumerate(self.plot_overlay_list): - if len(overlay_list) > 1: + if len(overlay_list) >= 1: self.plot_final_result_overlay(i_config, jetR, overlay_list) @@ -1293,13 +1294,13 @@ def plot_final_result(self, jetR, obs_label, obs_setting, grooming_setting): self.utils.set_plotting_options() ROOT.gROOT.ForceStyle() - if self.do_theory and float(obs_label.split('_')[0]) in self.theory_beta and \ + if self.do_theory and float(obs_label.split('_')[0]) in self.theory_alpha and \ ( (self.use_old and not grooming_setting) or not self.use_old ): # Compare parton-level theory to parton-level event generators print("Plotting parton-level theory comparisons for", obs_label) self.plot_parton_comp(jetR, obs_label, obs_setting, grooming_setting) - if self.do_theory_F_np and float(obs_label.split('_')[0]) in self.theory_beta and \ + if self.do_theory_F_np and float(obs_label.split('_')[0]) in self.theory_alpha and \ ( (self.use_old and not grooming_setting) or not self.use_old ): # Compare parton-level theory to parton-level event generators print("Plotting F_NP-convolved theory comparisons for", obs_label) @@ -1314,7 +1315,7 @@ def plot_final_result(self, jetR, obs_label, obs_setting, grooming_setting): self.plot_observable(jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, maxbin, plot_MC=True) - if self.do_theory and float(obs_label.split('_')[0]) in self.theory_beta and \ + if self.do_theory and float(obs_label.split('_')[0]) in self.theory_alpha and \ ( (self.use_old and not grooming_setting) or not self.use_old ): self.plot_observable(jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, maxbin, plot_MC=False, plot_theory=True) @@ -1328,7 +1329,7 @@ def plot_final_result(self, jetR, obs_label, obs_setting, grooming_setting): min_pt_truth, max_pt_truth, maxbin) if min_pt_truth == 40 and (jetR == 0.2 or jetR == 0.4): 
- # Only want to compare to girth with \beta=1 + # Only want to compare to girth with \alpha=1 if obs_label == '1': self.plot_obs_comp(jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, maxbin) @@ -1382,9 +1383,9 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, #fraction_tagged = getattr(self, '{}_fraction_tagged'.format(name)) # maxbin+1 in grooming case to account for extra tagging bin if grooming_setting and maxbin: - h = self.truncate_hist(getattr(self, name), maxbin+1, name+'_trunc') + h = self.truncate_hist(getattr(self, name), None, maxbin+1, (name+'_trunc').replace('__', '_')) else: - h = self.truncate_hist(getattr(self, name), maxbin, name+'_trunc') + h = self.truncate_hist(getattr(self, name), None, maxbin, (name+'_trunc').replace('__', '_')) h.SetMarkerSize(1.5) h.SetMarkerStyle(20) h.SetMarkerColor(color) @@ -1452,7 +1453,7 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, color = self.ColorArray[4] if show_np_region: - # P vs NP cutoff point: lambda_beta ~ Lambda / (pT * R) -- use avg value of pT for the bin. + # P vs NP cutoff point: lambda_alpha ~ Lambda / (pT * R) -- use avg value of pT for the bin. 
# Formula assumes that jet pT xsec falls like pT^(-5.5) formula_pt = (4.5/3.5)*(min_pt_truth**-3.5 - max_pt_truth**-3.5) / \ (min_pt_truth**-4.5 - max_pt_truth**-4.5) @@ -1662,7 +1663,7 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, hPythia = None; fraction_tagged_pythia = None; if grooming_setting: hPythia, fraction_tagged_pythia = self.MC_prediction( - jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin+1, 'Pythia') + jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin+1 if maxbin else None, 'Pythia') else: hPythia, fraction_tagged_pythia = self.MC_prediction( jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin, 'Pythia') @@ -1745,7 +1746,7 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, text_latex.DrawLatex(text_xval, 0.66-delta, text) if grooming_setting: - text = self.utils.formatted_grooming_label(grooming_setting).replace("#beta}", "#beta}_{SD}") + text = self.utils.formatted_grooming_label(grooming_setting)#.replace("#beta}", "#beta}_{SD}") text_latex.DrawLatex(text_xval, 0.66-2*delta, text) text_latex.SetTextSize(0.04) @@ -1769,7 +1770,7 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, self.utils.setup_legend(myLegend, 0.035) if show_everything_else: myLegend.AddEntry(h, 'ALICE pp', 'pe') - myLegend.AddEntry(h_sys, 'Sys. uncertainty', 'f') + myLegend.AddEntry(h_sys, 'Syst. 
uncertainty', 'f') if plot_pythia: myLegend.AddEntry(hPythia, 'PYTHIA8 Monash2013', 'pe') if plot_herwig: @@ -1800,7 +1801,7 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, else: for ri, lab in enumerate(self.theory_response_labels): myLegend.AddEntry(hcent_list[ri], 'NLL\' #otimes '+lab, 'lf') - myLegend.AddEntry(line, '#it{#lambda}_{#it{#beta}}^{NP region} #leq' + \ + myLegend.AddEntry(line, '#it{#lambda}_{#it{#alpha}}^{NP region} #leq' + \ '#Lambda / (#it{p}_{T,jet}^{ch} #it{R})', 'lf') myLegend.Draw() @@ -1849,9 +1850,9 @@ def plot_theory_ratios(self, jetR, obs_label, obs_setting, grooming_setting, name = 'hmain_{}_R{}_{}_{}-{}'.format(self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) if grooming_setting and maxbin: - h = self.truncate_hist(getattr(self, name), maxbin+1, name+'_trunc') + h = self.truncate_hist(getattr(self, name), None, maxbin+1, (name+'_trunc').replace('__', '_')) else: - h = self.truncate_hist(getattr(self, name), maxbin, name+'_trunc') + h = self.truncate_hist(getattr(self, name), None, maxbin, (name+'_trunc').replace('__', '_')) n_obs_bins_truth = self.n_bins_truth(obs_label) truth_bin_array = self.truth_bin_array(obs_label) @@ -2565,7 +2566,7 @@ def plot_obs_comp(self, jetR, obs_label, obs_setting, grooming_setting, fraction_tagged = getattr(self, 'tagging_fraction_R{}_{}_{}-{}'.format( jetR, obs_label, min_pt_truth, max_pt_truth)) #fraction_tagged = getattr(self, '{}_fraction_tagged'.format(name)) - h = self.truncate_hist(getattr(self, name), maxbin, name+'_trunc') + h = self.truncate_hist(getattr(self, name), None, maxbin, (name+'_trunc').replace('__', '_')) h.SetMarkerSize(1.5) h.SetMarkerStyle(20) h.SetMarkerColor(color) @@ -2651,7 +2652,7 @@ def plot_obs_comp(self, jetR, obs_label, obs_setting, grooming_setting, delta = 0.07 if grooming_setting: - text = self.utils.formatted_grooming_label(grooming_setting).replace("#beta}", "#beta}_{SD}") + text = 
self.utils.formatted_grooming_label(grooming_setting)#.replace("#beta}", "#beta}_{SD}") text_latex.DrawLatex(0.57, 0.59-delta, text) text_latex.SetTextSize(0.04) @@ -2661,7 +2662,7 @@ def plot_obs_comp(self, jetR, obs_label, obs_setting, grooming_setting, myLegend = ROOT.TLegend(0.25, 0.7, 0.45, 0.85) self.utils.setup_legend(myLegend,0.035) myLegend.AddEntry(h, 'This measurement', 'pe') - myLegend.AddEntry(h_sys, 'Sys. uncertainty', 'f') + myLegend.AddEntry(h_sys, 'Syst. uncertainty', 'f') myLegend.AddEntry(hCompStat, 'ALI-PREL-339374', 'pe') myLegend.Draw() @@ -2688,7 +2689,7 @@ def plot_obs_comp(self, jetR, obs_label, obs_setting, grooming_setting, #---------------------------------------------------------------------- def MC_prediction(self, jetR, obs_setting, obs_label, min_pt_truth, max_pt_truth, maxbin, MC='Pythia', overlay=False): - + if MC.lower() == 'pythia': hMC = self.get_pythia_from_response(jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay) @@ -2704,53 +2705,68 @@ def MC_prediction(self, jetR, obs_setting, obs_label, min_pt_truth, fraction_tagged_MC = n_jets_tagged/n_jets_inclusive hMC.Scale(1./n_jets_inclusive, 'width') - + return [hMC, fraction_tagged_MC] #---------------------------------------------------------------------- def get_pythia_from_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay=False): - output_dir = getattr(self, 'output_dir_main') - - prev_prelim = False - if self.use_prev_prelim and overlay and (jetR == 0.2 or jetR == 0.4) \ - and min_pt_truth == 40 and obs_label == '1': - prev_prelim = True - # Need to rebin response for the binning used by previous preliminary result - filepath = os.path.join(output_dir, 'response_prev_prelim.root') - - if not os.path.exists(filepath): - # Create rebinned THn with these binnings, and write to file - print("Rebinning response matrix for previous preliminary masurement...") - name_thn = self.utils.name_thn(self.observable, jetR, obs_label) - name_thn_rebinned = 
self.utils.name_thn_rebinned(self.observable, jetR, obs_label) - name_roounfold = 'roounfold_response_R{}_{}'.format(jetR, obs_label) - thn = ROOT.TFile(self.main_response, 'READ').Get(name_thn) - thn.SetName(name_thn) - label = 'R{}_{}'.format(jetR, obs_label) - pt_bins_truth = array('d', [5, 20, 40, 60, 80, 100, 150, 200]) - pt_bins_det = array('d', [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150]) - obs_bins = getattr(self, "xedges_prev_prelim_%s" % jetR) - self.utils.rebin_response( - filepath, thn, name_thn_rebinned, name_roounfold, label, len(pt_bins_det)-1, - pt_bins_det, len(obs_bins)-1, obs_bins, len(pt_bins_truth)-1, pt_bins_truth, - len(obs_bins)-1, obs_bins, self.observable, do_roounfoldresponse=False) - else: + # Use direct (unmatched) files instead of projecting fastsim RM + do_direct_files = True #(len(self.theory_predictions) >= (int(recoils) + 1)) + + h = None + if do_direct_files: # Read from TH2 + + f = ROOT.TFile(self.main_response, 'READ') + name = "h_%s_JetPt_Truth_R%s_%sScaled" % (self.observable, str(jetR), obs_label) \ + if obs_label else "h_%s_JetPt_Truth_R%sScaled" % (self.observable, str(jetR)) + th2 = f.Get(name) + if not th2: + raise AttributeError("%s not found in %s" % (name, self.main_response)) + if not th2.GetSumw2(): + th2.Sumw2() + + # Set range and binning to be the same as data + name_data = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_data = getattr(self, name_data) + pt_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + obs_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + move_underflow = (obs_bin_array[0] < 0) + + th2.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) + h = th2.ProjectionY() + + # Finally, rename and truncate the histogram to the correct size + name = 'hPythia_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, 
min_pt_truth, max_pt_truth) + h_rebin = h.Rebin(len(obs_bin_array)-1, name+"_Rebin", obs_bin_array) + if move_underflow: + h_rebin.SetBinContent(1, h.GetBinContent(0)) + h_rebin.SetBinError(1, h.GetBinError(0)) + h = self.truncate_hist(h_rebin, None, maxbin, name) + h.SetDirectory(0) + + else: # Get projection of RM + output_dir = getattr(self, 'output_dir_main') + filepath = os.path.join(output_dir, 'response.root') - f = ROOT.TFile(filepath, 'READ') + f = ROOT.TFile(filepath, 'READ') - thn_name = 'hResponse_JetPt_{}_R{}_{}_rebinned'.format(self.observable, jetR, obs_label) - thn = f.Get(thn_name) - thn.GetAxis(1).SetRangeUser(min_pt_truth, max_pt_truth) + thn_name = 'hResponse_JetPt_{}_R{}_{}_rebinned'.format( + self.observable, jetR, obs_label).replace("__", "_") + thn = f.Get(thn_name) + if not thn: + raise AttributeError("%s not found in %s" % (thn_name, filepath)) + thn.GetAxis(1).SetRangeUser(min_pt_truth, max_pt_truth) - name = 'hPythia_{}_R{}_{}_{}-{}'.format( - self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) - if prev_prelim: - h = thn.Projection(3) - else: - h = self.truncate_hist(thn.Projection(3), maxbin, name) - h.SetDirectory(0) + name = 'hPythia_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h = self.truncate_hist(thn.Projection(3), None, maxbin, name) + h.SetDirectory(0) return h @@ -2758,17 +2774,63 @@ def get_pythia_from_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, def get_herwig_from_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, maxbin, overlay=False): - filepath = os.path.join(self.output_dir_fastsim_generator1, 'response.root') - f = ROOT.TFile(filepath, 'READ') + # Use direct (unmatched) files instead of projecting fastsim RM + do_direct_files = True #(len(self.theory_predictions) >= (int(recoils) + 1)) + + h = None - thn_name = 'hResponse_JetPt_{}_R{}_{}_rebinned'.format(self.observable, jetR, obs_label) - thn = f.Get(thn_name) - 
thn.GetAxis(1).SetRangeUser(min_pt_truth, max_pt_truth) + if do_direct_files: # Read from TH2 - name = 'hHerwig_{}_R{}_{}_{}-{}'.format( - self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) - h = self.truncate_hist(thn.Projection(3), maxbin, name) - h.SetDirectory(0) + f = ROOT.TFile(self.fastsim_response_list[1], 'READ') + name = "h_%s_JetPt_Truth_R%s_%sScaled" % (self.observable, str(jetR), obs_label) \ + if obs_label else "h_%s_JetPt_Truth_R%sScaled" % (self.observable, str(jetR)) + th2 = f.Get(name) + if not th2: + raise AttributeError("%s not found in %s" % (name, self.main_response)) + if not th2.GetSumw2(): + th2.Sumw2() + + # Set range and binning to be the same as data + name_data = 'hmain_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_data = getattr(self, name_data) + pt_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + obs_bin_array = array('d', [h_data.GetXaxis().GetBinLowEdge(i) for \ + i in range(1, h_data.GetNbinsX()+2)]) + move_underflow = (obs_bin_array[0] < 0) + + th2.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) + h = th2.ProjectionY() + + # Finally, rename and truncate the histogram to the correct size + name = 'hHerwig_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h_rebin = h.Rebin(len(obs_bin_array)-1, name+"_Rebin", obs_bin_array) + if move_underflow: + h_rebin.SetBinContent(1, h.GetBinContent(0)) + h_rebin.SetBinError(1, h.GetBinError(0)) + h = self.truncate_hist(h_rebin, None, maxbin, name) + h.SetDirectory(0) + + else: # Get projection of RM + try: + filepath = os.path.join(self.output_dir_fastsim_generator1, 'response.root') + except AttributeError: # No fastsim generator + return None + f = ROOT.TFile(filepath, 'READ') + + thn_name = 'hResponse_JetPt_{}_R{}_{}_rebinned'.format( + self.observable, jetR, obs_label).replace("__", "_") + thn = f.Get(thn_name) + if not thn: + 
raise AttributeError("%s not found in %s" % (thn_name, filepath)) + thn.GetAxis(1).SetRangeUser(min_pt_truth, max_pt_truth) + + name = 'hHerwig_{}_R{}_{}_{}-{}'.format( + self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) + h = self.truncate_hist(thn.Projection(3), None, maxbin, name) + h.SetDirectory(0) return h @@ -2826,6 +2888,9 @@ def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_p for name in overlay_list: if "SD" not in name: break + elif "config_m" in name: + # Do not put groomed mass on log plot + break elif name == overlay_list[-1]: setlogy = True pad1.SetLogy() @@ -2886,9 +2951,9 @@ def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_p h.SetBinError(i, 0.) else: if grooming_setting and maxbin: - h = self.truncate_hist(getattr(self, name), maxbin+1, name+'_trunc') + h = self.truncate_hist(getattr(self, name), None, maxbin+1, (name+'_trunc').replace('__', '_')) else: - h = self.truncate_hist(getattr(self, name), maxbin, name+'_trunc') + h = self.truncate_hist(getattr(self, name), None, maxbin, (name+'_trunc').replace('__', '_')) h_sys = getattr(self, 'hResult_{}_systotal_R{}_{}_{}-{}'.format( self.observable, jetR, obs_label, min_pt_truth, max_pt_truth)) @@ -2922,17 +2987,22 @@ def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_p myBlankHisto = ROOT.TH1F('myBlankHisto','Blank Histogram', 1, xmin, xmax) myBlankHisto.SetNdivisions(505) myBlankHisto.SetXTitle(xtitle) - myBlankHisto.GetYaxis().SetTitleOffset(1.3) + ytitleoffset = 1.3 if self.observable == "ang" else 1.5 + myBlankHisto.GetYaxis().SetTitleOffset(ytitleoffset) myBlankHisto.SetYTitle(ytitle) if jetR == 0.2: - if min_pt_truth == 20: - myBlankHisto.SetMaximum(1.1*ymax) - elif min_pt_truth == 40: - myBlankHisto.SetMaximum(1.2*ymax) - elif min_pt_truth == 60: - myBlankHisto.SetMaximum(1.15*ymax) - else: - myBlankHisto.SetMaximum(1.3*ymax) + if self.observable == "ang": + if min_pt_truth == 20: + 
myBlankHisto.SetMaximum(1.1*ymax) + elif min_pt_truth == 40: + myBlankHisto.SetMaximum(1.2*ymax) + elif min_pt_truth == 60: + myBlankHisto.SetMaximum(1.15*ymax) + else: + myBlankHisto.SetMaximum(1.3*ymax) + else: # mass + myBlankHisto.SetMaximum(1.6*ymax) + elif jetR == 0.4: if min_pt_truth == 20: myBlankHisto.SetMaximum(1.5*ymax) @@ -2942,20 +3012,27 @@ def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_p myBlankHisto.SetMaximum(1.15*ymax) else: myBlankHisto.SetMaximum(1.5*ymax) + else: myBlankHisto.SetMaximum(1.5*ymax) + if setlogy: - myBlankHisto.SetMaximum(5*ymax) + if self.observable == "mass": + myBlankHisto.SetMaximum(1e2*ymax) + else: + myBlankHisto.SetMaximum(5*ymax) + myBlankHisto.SetMinimum(0.) + if plot_ratio: myBlankHisto.SetMinimum(2e-4) # Don't draw 0 on top panel if setlogy: # the minimum value matters myBlankHisto.SetMinimum(2*ymin/3) myBlankHisto.GetYaxis().SetTitleSize(0.065) - myBlankHisto.GetYaxis().SetTitleOffset(1.1) + myBlankHisto.GetYaxis().SetTitleOffset(ytitleoffset - 0.2) myBlankHisto.GetYaxis().SetLabelSize(0.06) myBlankHisto.Draw('E') - + # Plot ratio if plot_ratio: @@ -3096,8 +3173,13 @@ def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_p subobs_label = self.utils.formatted_subobs_label(self.observable) text = '' - if subobs_label: - text += '%s = %s' % (subobs_label, obs_setting) + if self.observable == "ang": + if subobs_label: + text += subobs_label + if obs_setting: + text += ' = ' + str(obs_setting) + else: # mass + text += "ALICE data" text_list.append(text) h_list.append(h) @@ -3107,7 +3189,7 @@ def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_p myLegend.AddEntry(h, text, 'pe') else: myLegend2.AddEntry(h, text, 'pe') - myLegend.AddEntry(h_sys, 'Sys. uncertainty', 'f') + myLegend.AddEntry(h_sys, 'Syst. 
uncertainty', 'f') if plot_MC: if MC.lower() == "pythia": myLegend.AddEntry(hMC, 'PYTHIA8 Monash2013', 'l') @@ -3136,7 +3218,7 @@ def plot_observable_overlay_subconfigs(self, i_config, jetR, overlay_list, min_p text_latex.DrawLatex(text_xval, 0.63, text) if grooming_setting: - text = self.utils.formatted_grooming_label(grooming_setting).replace("#beta}", "#beta}_{SD}") + text = self.utils.formatted_grooming_label(grooming_setting)#.replace("#beta}", "#beta}_{SD}") text_latex.DrawLatex(text_xval, 0.57, text) myLegend.Draw() @@ -3181,12 +3263,18 @@ def get_max_min(self, name, overlay_list, maxbins): grooming_setting = self.grooming_settings[i] obs_label = self.utils.obs_label(obs_setting, grooming_setting) maxbin = maxbins[i] - + h = getattr(self, name.format(obs_label)) + if maxbin == None: + maxbin_i = h.GetNbinsX() + 1 + else: + maxbin_i = maxbin + 1 + if 'SD' in obs_label: + maxbin_i += 1 if 'SD' in obs_label: - content = [ h.GetBinContent(j) for j in range(2, maxbin+2) ] + content = [ h.GetBinContent(j) for j in range(2, maxbin_i) ] else: - content = [ h.GetBinContent(j) for j in range(1, maxbin+1) ] + content = [ h.GetBinContent(j) for j in range(1, maxbin_i) ] min_val = min(content) if min_val < total_min: diff --git a/pyjetty/alice_analysis/analysis/user/fastsim/mc_comparator.py b/pyjetty/alice_analysis/analysis/user/fastsim/mc_comparator.py index 80cc8f456..1f88652b6 100644 --- a/pyjetty/alice_analysis/analysis/user/fastsim/mc_comparator.py +++ b/pyjetty/alice_analysis/analysis/user/fastsim/mc_comparator.py @@ -42,7 +42,7 @@ def __init__(self, inputFileMC, inputFileFS, inputFileData, configFile, self.subconfig_list = [name for name in list(self.config["ang"].keys()) if 'config' in name ] self.jetR_list = self.config['jetR'] - self.beta_list = self.config['betas'] + self.alpha_list = self.config['alphas'] self.pt_bins = self.config["ang"]["common_settings"]["pt_bins_reported"] @@ -77,8 +77,8 @@ def mc_comparator(self): for jetR in self.jetR_list: for 
subconfig_name in self.subconfig_list: subconfig = self.config["ang"][subconfig_name] - beta = subconfig["beta"] - label = "R%s_%s" % (str(jetR), str(beta)) + alpha = subconfig["alpha"] + label = "R%s_%s" % (str(jetR), str(alpha)) # Load grooming setting if required use_grooming = False @@ -209,7 +209,7 @@ def mc_comparator(self): text = "%i < #it{p}_{T, jet}^{ch} < %s GeV/#it{c}" % \ (self.pt_bins[i], self.pt_bins[i+1]) text_latex.DrawLatex(0.19, 0.82, text) - text = "#it{R} = " + str(jetR) + ", #it{#beta} = " + str(beta) + text = "#it{R} = " + str(jetR) + ", #it{#alpha} = " + str(alpha) text_latex.DrawLatex(0.19, 0.75, text) if use_grooming: text = "SD: #it{z}_{cut} = %s, #it{#beta}_{SD} = %s" % \ @@ -237,7 +237,7 @@ def mc_comparator(self): h_ratio.GetXaxis().SetTitleOffset(4.) h_ratio.GetXaxis().SetLabelFont(43) h_ratio.GetXaxis().SetLabelSize(20) - h_ratio.SetXTitle('#it{#lambda}_{#it{#beta}=%s}' % beta) + h_ratio.SetXTitle('#it{#lambda}_{#it{#alpha}=%s}' % alpha) h_ratio.GetYaxis().SetTitleSize(20) h_ratio.GetYaxis().SetTitleFont(43) h_ratio.GetYaxis().SetTitleOffset(2.2) diff --git a/pyjetty/alice_analysis/analysis/user/james/plot_theta_g_theory_figures.py b/pyjetty/alice_analysis/analysis/user/james/plot_theta_g_theory_figures.py new file mode 100644 index 000000000..f869fac94 --- /dev/null +++ b/pyjetty/alice_analysis/analysis/user/james/plot_theta_g_theory_figures.py @@ -0,0 +1,847 @@ +""" +macro for plotting multi-paneled angularity theory figures +""" + +# General +import os +import sys +import math +import yaml +import argparse + +# Data analysis and plotting +import ROOT +import numpy as np +from array import * + +# Base class +from pyjetty.alice_analysis.analysis.base import common_base + +# Prevent segmentation fault from C code (doesn't seem to help?) 
+sys.settrace + +# Prevent ROOT from stealing focus when plotting +ROOT.gROOT.SetBatch(True) + +################################################################ +class PlotAngularityFigures(common_base.CommonBase): + + # --------------------------------------------------------------- + # Constructor + # --------------------------------------------------------------- + def __init__(self, input_dir='', output_dir='', **kwargs): + super(PlotAngularityFigures, self).__init__(**kwargs) + + self.output_dir = output_dir + self.file_format = '.pdf' + + #------------------------------------------------------ + + self.base_dir = '/home/ezra/theory_predictions/theta_g' + self.data_file = 'fFinalResults_theta_g_data.root' + self.theory_file = 'folded_scet_calculations.root' + self.R_list = [0.4] + self.pt_list = [60, 80] + self.folding_labels = ['PYTHIA8', 'Herwig7'] + + # Note: logx doesn't work well, since lowest bins are different, and dominate some plots + self.logx = False + # Note: logy is also not great, since the right-hand tails drop to different values + self.logy = False + + # Grooming settings + self.sd_zcut = [0.1] + self.sd_beta = [0, 1, 2] + #self.theory_grooming_settings = [{'sd': [self.sd_zcut, self.sd_beta]}] + # self.utils.grooming_settings + #self.theory_grooming_labels = [self.utils.grooming_label(gs) for\ + # gs in self.theory_grooming_settings] + + # Don't use theory predictions below this value (to disable, set to 0) + self.thetag_cut = 0. 
+ + self.xmin = -0.03 + self.ymax = 13.5 + self.ymin_ratio = 0.4 + self.ymax_ratio = 2.5 + + self.xtitle = '#it{#theta}_{g}' + self.ytitle = '#frac{1}{#it{#sigma}_{#it{#theta}_{g}>#it{#theta}_{g}^{NP}}} ' + \ + '#frac{d#it{#sigma}}{d#it{#theta}_{g}}' + + # y title with integral in denominator + #self.ytitle = '#frac{d#it{#sigma}}{d#it{#theta}_{g}} ' + \ + # '#/#int_{#it{#theta}_{g}^{NP}}^{1} ' + \ + # '#frac{d#it{#sigma}}{d#it{#theta}_{g}} d#it{#theta}_{g}' + + self.left_offset = 0.4 + self.bottom_offset = 0.15 + self.ratio_height = 0.25 + self.top_bottom_scale_factor = (1 - self.ratio_height) / \ + (1 - self.bottom_offset - self.ratio_height) + + #------------------------------------------------------ + + self.marker_data = 21 + self.markers = [20, 34, 33, 22, 23] + self.marker_size = 3 + self.marker_size_ratio = 2 + self.alpha = 0.7 + self.color_data = 1 + self.colors = [ROOT.kRed-7, ROOT.kTeal-8, ROOT.kViolet-8, ROOT.kBlue-9] + + #------------------------------------------------------ + + # Soft scale + # Perturbative region defined by zcut * pT * theta_g^{1 + beta} * R = Lambda + self.theta_np = {} + self.Lambda = 1 + + for i, min_pt in list(enumerate(self.pt_list))[:-1]: + max_pt = self.pt_list[i+1] + + # P vs NP cutoff point ~ Lambda / (pT * R) -- use avg value of pT for the bin. 
+ # Formula assumes that jet pT xsec falls like pT^(-5.5) + formula_pt = (4.5/3.5)*(min_pt**-3.5 - max_pt**-3.5) / \ + (min_pt**-4.5 - max_pt**-4.5) + # Also scale by ~20% to account for shift in full --> charged pT spectrum + formula_pt *= 1.2 + + self.theta_np[min_pt] = { R : { zcut : { beta : (self.Lambda / (zcut * formula_pt * R))**(1 / (1 + beta)) \ + for beta in self.sd_beta } + for zcut in self.sd_zcut } + for R in self.R_list } + + #for R in self.R_list: + # self.theta_np[min_pt][R] = {} + # for zcut in self.sd_zcut: + # self.theta_np[min_pt][R][zcut] = {} + # for beta in self.sd_beta: + # self.theta_np[min_pt][R][zcut][beta] = ( + # self.Lambda / (zcut * formula_pt * R) ) ** (1 / (1 + beta)) + + + #------------------------------------------------------ + # Store paths to all final results in a dictionary + self.predictions = {} + + for i, min_pt in list(enumerate(self.pt_list))[:-1]: + self.predictions[min_pt] = {} + for R in self.R_list: + self.predictions[min_pt][str(R)] = os.path.join(self.base_dir, self.theory_file) + + print(self) + + #------------------------------------------------------------------------------------------- + #------------------------------------------------------------------------------------------- + #------------------------------------------------------------------------------------------- + def plot_results(self): + + self.setOptions() + ROOT.gROOT.ForceStyle() + + for R in self.R_list: + for i, min_pt in list(enumerate(self.pt_list))[:-1]: + max_pt = self.pt_list[i+1] + for zcut in self.sd_zcut: + self.plot_multipanel(R, min_pt, max_pt, zcut) + + #------------------------------------------------------------------------------------------- + def plot_multipanel(self, R, min_pt, max_pt, zcut): + + # Create multi-panel canvas + cname = "c_" + str(R) + '_PtBin' + str(min_pt) + '-' + str(max_pt) + '_zcut' + str(zcut) + c = ROOT.TCanvas(cname, cname, 2400, 1400) + c.SetRightMargin(0.05); + c.SetLeftMargin(self.left_offset); + 
c.SetTopMargin(0.03); + c.SetBottomMargin(self.bottom_offset / 2); + c.cd() + c.Divide(3, 1, 0.01, 0.) + + # Keep histograms in memory, otherwise there can be problems + # with double deletes (i.e. ROOT then python deletes) + self.plot_list = [] + self.g_theory_dict = { beta : [] for beta in self.sd_beta } + self.h_ratio_dict = { beta : [] for beta in self.sd_beta } + self.g_ratio_dict = { beta : [] for beta in self.sd_beta } + + # Plot each pt bin in its own pad + for i, beta in enumerate(self.sd_beta): + self.plot_theta_g( + c, pad=i+1, R=R, min_pt=min_pt, max_pt=max_pt, zcut=zcut, beta=beta) + + outfilename = "hJet_theta_g_Theory_%s_PtBin%i-%i_zcut%s" % \ + (str(R), min_pt, max_pt, str(zcut).replace('.', '')) + outfilename += self.file_format + output_filename = os.path.join(self.output_dir, outfilename) + c.SaveAs(output_filename) + + #------------------------------------------------------------------------------------------- + # Get theta_g histograms from file, and call plot_beta_overlay to draw them + #------------------------------------------------------------------------------------------- + def plot_theta_g(self, c, pad=0, R=1, min_pt=60, max_pt=80, zcut=0.2, beta=0): + + filename = os.path.join(self.base_dir, self.data_file) + f_data = ROOT.TFile(filename, 'READ') + + self.h = None + self.h_sys = None + self.blank_histo_list = [] + + grooming_label = 'SD_zcut%s_B%s' % (str(zcut).replace('.',''), beta) + + # Get data hist + h_name ='hmain_theta_g_R{}_{}_{}-{}'.format( + R, grooming_label, min_pt, max_pt) + h_sys_name = 'hResult_theta_g_systotal_R{}_{}_{}-{}'.format( + R, grooming_label, min_pt, max_pt) + self.h = f_data.Get(h_name) + self.h_sys = f_data.Get(h_sys_name) + self.h.SetDirectory(0) + self.h_sys.SetDirectory(0) + + # Remove negative bin edges (which has the tagging fraction) + self.h = self.remove_negative_bin_edges(self.h) + self.h_sys = self.remove_negative_bin_edges(self.h_sys) + + # Normalize such that integral in perturbative region is 1 + 
n = self.h.GetNbinsX() + min_bin = self.h.FindBin(self.theta_np[min_pt][R][zcut][beta]) + 1 + #if self.h.GetBinCenter(min_bin) <= self.lambda_np_groomed[min_pt][R][alpha]: + # min_bin += 1 + if min_bin > n: + min_bin = n + integral_perturbative = self.h.Integral(min_bin, n, 'width') + self.h.Scale(1./integral_perturbative) + self.h_sys.Scale(1./integral_perturbative) + + #self.ymax = 2 * self.h.GetMaximum() * (1 + (pad == 3) * 0.2) + + self.scale_label = self.scale_histogram_for_visualization( + self.h_sys, R, min_pt, zcut, beta) + self.scale_histogram_for_visualization(self.h, R, min_pt, zcut, beta) + + filename = os.path.join(self.base_dir, self.theory_file) + f_theory = ROOT.TFile(filename, 'READ') + + # Get folded theory predictions + for i, folding_label in enumerate(self.folding_labels): + + name_cent = 'h1_folded_theta_g_ch_MPIon_R{}_{}_{}_pT_{}_{}'.format( + str(R).replace('.', ''), grooming_label, folding_label, min_pt, max_pt) + name_min = 'h1_min_folded_theta_g_ch_MPIon_R{}_{}_{}_pT_{}_{}'.format( + str(R).replace('.', ''), grooming_label, folding_label, min_pt, max_pt) + name_max = 'h1_max_folded_theta_g_ch_MPIon_R{}_{}_{}_pT_{}_{}'.format( + str(R).replace('.', ''), grooming_label, folding_label, min_pt, max_pt) + + h_theory_cent = f_theory.Get(name_cent) + h_theory_min = f_theory.Get(name_min) + h_theory_max = f_theory.Get(name_max) + h_theory_cent.SetDirectory(0) + h_theory_min.SetDirectory(0) + h_theory_max.SetDirectory(0) + + # Rebin to data binning + h_theory_cent_rebinned = self.rebin_theory(h_theory_cent, self.h) + h_theory_min_rebinned = self.rebin_theory(h_theory_min, self.h) + h_theory_max_rebinned = self.rebin_theory(h_theory_max, self.h) + + h_theory_cent_rebinned = self.remove_negative_bin_edges(h_theory_cent_rebinned) + h_theory_min_rebinned = self.remove_negative_bin_edges(h_theory_min_rebinned) + h_theory_max_rebinned = self.remove_negative_bin_edges(h_theory_max_rebinned) + + # Normalize such that integral in perturbative region is 1 + 
min_bin = h_theory_cent_rebinned.FindBin(self.theta_np[min_pt][R][zcut][beta]) + 1 + #if self.h.GetBinCenter(min_bin) <= self.theta_np[min_pt][R][zcut][beta]: + # min_bin += 1 + if min_bin > n: + # At least normalize by one bin (the last bin) + min_bin = n + integral_perturbative_theory = h_theory_cent_rebinned.Integral(min_bin, n, 'width') + integral_total = h_theory_cent_rebinned.Integral(1, n, 'width') + + h_theory_cent_rebinned.Scale(1./integral_perturbative_theory) + h_theory_min_rebinned.Scale(1./integral_perturbative_theory) + h_theory_max_rebinned.Scale(1./integral_perturbative_theory) + + # Scale additionally for visualization + self.scale_histogram_for_visualization(h_theory_cent_rebinned, R, min_pt, zcut, beta) + self.scale_histogram_for_visualization(h_theory_min_rebinned, R, min_pt, zcut, beta) + self.scale_histogram_for_visualization(h_theory_max_rebinned, R, min_pt, zcut, beta) + + n_theory_bins = h_theory_cent_rebinned.GetNbinsX() + x = np.array([h_theory_cent_rebinned.GetXaxis().GetBinCenter(i) \ + for i in range(1, n_theory_bins+1)]) + y = np.array([h_theory_cent_rebinned.GetBinContent(i) \ + for i in range(1, n_theory_bins+1)]) + xerrup = xerrdn = np.array([0. 
for i in range(n_theory_bins)]) + yerrup = np.array([h_theory_max_rebinned.GetBinContent(i)-y[i-1] \ + for i in range(1, n_theory_bins+1)]) + yerrdn = np.array([y[i-1]-h_theory_min_rebinned.GetBinContent(i) \ + for i in range(1, n_theory_bins+1)]) + g_theory = ROOT.TGraphAsymmErrors(n_theory_bins, x, y, xerrdn, xerrup, yerrdn, yerrup) + g_theory_name = 'g_theory_%i_%s' % (i, grooming_label) + g_theory.SetNameTitle(g_theory_name, g_theory_name) + self.g_theory_dict[beta].append(g_theory) + + # Construct ratios in self.h_ratio_dict, self.g_ratio_dict + self.construct_ratio(self.h, self.h_sys, h_theory_cent_rebinned, h_theory_min_rebinned, + h_theory_max_rebinned, n_theory_bins, x, xerrup, xerrdn, y, yerrup, yerrdn, + R, zcut, beta, i, pad) + + f_data.Close() + f_theory.Close() + + # Plot overlay of alpha values + self.plot_beta_overlay(c, pad, R, zcut, beta, min_pt, max_pt) + + # Keep histograms in memory + self.plot_list.append(self.h) + self.plot_list.append(self.h_sys) + self.plot_list.append(self.g_theory_dict) + self.plot_list.append(self.h_ratio_dict) + self.plot_list.append(self.g_ratio_dict) + self.plot_list.append(self.blank_histo_list) + + #------------------------------------------------------------------------------------------- + # Draw beta histograms in given pad + #------------------------------------------------------------------------------------------- + def plot_beta_overlay(self, c, pad, R, zcut, beta, min_pt, max_pt): + + self.logy = False + + # Create canvas + c.cd(pad) + + grooming_label = "zcut%s_B%s" % (str(zcut).replace('.',''), beta) + + # Set pad to plot distributions + setattr(self, "pad1_{}_PtBin{}-{}_{}_{}".format(R, min_pt, max_pt, pad, grooming_label), + ROOT.TPad("pad1_{}_PtBin{}-{}_{}_{}".format(R, min_pt, max_pt, pad, grooming_label), + "pad1_{}_PtBin{}-{}_{}_{}".format(R, min_pt, max_pt, pad, grooming_label), + 0, self.bottom_offset + self.ratio_height,1,1)) + pad1 = getattr(self, "pad1_{}_PtBin{}-{}_{}_{}".format(R, min_pt, 
max_pt, pad, grooming_label)) + self.plot_list.append(pad1) + if pad in [1]: + pad1.SetLeftMargin(self.left_offset) + else: + pad1.SetLeftMargin(0.) + pad1.SetRightMargin(0.) + pad1.SetTopMargin(0.0) + pad1.SetBottomMargin(0.) + pad1.SetTicks(2,2) + if self.logy: + pad1.SetLogy() + if self.logx: + pad1.SetLogx() + pad1.Draw() + pad1.cd() + + # Draw blank histos + blankname = 'myBlankHisto_{}_PtBin{}-{}_{}'.format(pad, min_pt, max_pt, R) + xmax = self.h.GetXaxis().GetBinUpEdge(self.h.GetXaxis().GetNbins()) + myBlankHisto = ROOT.TH1F(blankname,blankname, 1, self.xmin, xmax) + myBlankHisto.SetNdivisions(505) + myBlankHisto.SetYTitle(self.ytitle) + myBlankHisto.GetYaxis().SetTitleSize(0.09) + myBlankHisto.GetYaxis().SetTitleOffset(1.4) + myBlankHisto.GetYaxis().SetLabelSize(0.06) + myBlankHisto.SetMinimum(0.001) + myBlankHisto.SetMaximum(self.ymax) + + myBlankHisto.Draw() + self.blank_histo_list.append(myBlankHisto) + + scale_factor = 1. + if pad in [1]: + shift = 0.0 + shift2 = 0.0 + else: + shift = -0.08 + shift2 = -0.15 - shift + + # Legend for the center pad (2) + leg = ROOT.TLegend(0.2+shift, 0.7, 0.6, 0.96) + size = 0.08 + self.setupLegend(leg, size) + + self.plot_list.append(leg) + + leg3 = ROOT.TLegend(0.2+shift/2, 0.72, 0.55, 0.96) + if pad == 2: + self.setupLegend(leg3, size) + self.plot_list.append(leg3) + + line_np = ROOT.TLine(self.theta_np[min_pt][R][zcut][beta], 0, + self.theta_np[min_pt][R][zcut][beta], self.ymax*0.6) + line_np.SetLineColor(self.colors[-1]) + line_np.SetLineStyle(2) + line_np.SetLineWidth(2) + self.plot_list.append(line_np) + + # Draw data + self.h.SetMarkerColor(self.color_data) + self.h.SetLineColor(self.color_data) + self.h.SetLineWidth(2) + self.h.SetMarkerStyle(self.marker_data) + self.h.SetMarkerSize(self.marker_size) + + self.h_sys.SetLineColor(0) + self.h_sys.SetMarkerSize(0) + self.h_sys.SetMarkerColor(0) + self.h_sys.SetFillColor(self.color_data) + self.h_sys.SetFillColorAlpha(self.color_data, 0.3) + 
self.h_sys.SetFillStyle(1001) + self.h_sys.SetLineWidth(0) + + leg.AddEntry(self.h, 'Data','PE') + + # Draw theory + for i, folding_label in enumerate(self.folding_labels): + + self.g_theory_dict[beta][i].SetFillColorAlpha(self.colors[i], 0.25) + self.g_theory_dict[beta][i].SetLineColor(self.colors[i]) + self.g_theory_dict[beta][i].SetLineWidth(3) + self.g_theory_dict[beta][i].Draw('L 3 same') + + leg.AddEntry(self.g_theory_dict[beta][i], 'NLL #otimes '+folding_label, 'lf') + + self.h_sys.Draw('E2 same') + line_np.Draw() + self.h.Draw('PE X0 same') + + if pad == 3: + leg.Draw('same') + if pad == 2: + leg3.Draw('same') + + if pad == 2: + leg3.AddEntry(line_np, "#it{#theta}_{g}^{NP} #leq " + \ + "(#frac{#Lambda}{#it{z}_{cut} #it{p}_{T} #it{R}})^" + \ + "{#frac{1}{1+#it{#beta}}}", 'lf') + leg3.AddEntry(self.h_sys, 'Syst. uncertainty', 'f') + + # Reset for ratio plot + self.logy = True + + # # # # # # # # # # # # # # # # # # # # # # # # + # text + # # # # # # # # # # # # # # # # # # # # # # # # + ymax = 0.93 + dy = 0.07 + x = 0.45 + shift + shift2 + + if pad == 1: + system0 = ROOT.TLatex(x,ymax,'ALICE Preliminary') + system0.SetNDC() + system0.SetTextSize(size / 1.2)#*scale_factor) + system0.Draw() + + system1 = ROOT.TLatex(x,ymax-dy,'pp #sqrt{#it{s}} = 5.02 TeV') + system1.SetNDC() + system1.SetTextSize(size / 1.2)#*scale_factor) + system1.Draw() + + system2 = ROOT.TLatex(x,ymax-2*dy,'charged jets anti-#it{k}_{T}') + system2.SetNDC() + system2.SetTextSize(size / 1.2)#*scale_factor) + system2.Draw() + + system3 = ROOT.TLatex(x,ymax-3*dy, + '#it{{R}} = {} |#it{{#eta}}_{{jet}}| < {}'.format(R, 0.9-R)) + system3.SetNDC() + system3.SetTextSize(size / 1.2)#*scale_factor) + system3.Draw() + + system4 = ROOT.TLatex(x,ymax-4.*dy-0.02, + str(min_pt) + ' < #it{p}_{T}^{ch jet} < ' + \ + str(max_pt) + ' GeV/#it{c}') + system4.SetNDC() + system4.SetTextSize(size / 1.2)#*scale_factor) + system4.Draw() + + self.plot_list.append(system0) + self.plot_list.append(system1) + 
self.plot_list.append(system2) + self.plot_list.append(system3) + self.plot_list.append(system4) + + #system5y = ymax-6.*dy if pad == 1 else ymax-4.6*dy + system5y = ymax-6.*dy + system5x = x+0.35 if pad == 1 else x+0.4 if pad==2 else x+0.45 + system5 = ROOT.TLatex(system5x, system5y, + '#it{{#beta}} = {}{}'.format(beta, self.scale_label)) + system5.SetNDC() + if pad in [1]: + beta_size = size / 1.3 + else: + beta_size = size + system5.SetTextSize(beta_size) + system5.Draw() + self.plot_list.append(system5) + + if pad == 2: + system5 = ROOT.TLatex(0.2+(shift/2), ymax-3.8*dy, + 'Soft drop: #it{z}_{cut} = %s' % str(zcut)) + system5.SetNDC() + system5.SetTextSize(size) + system5.Draw() + self.plot_list.append(system5) + + # Set pad for ratio + c.cd(pad) + pad2 = ROOT.TPad("pad2_{}".format(R), "pad2{}".format(R), + 0,0,1,self.bottom_offset+self.ratio_height) + self.plot_list.append(pad2) + if pad in [1]: + pad2.SetLeftMargin(self.left_offset) + else: + pad2.SetLeftMargin(0.) + pad2.SetRightMargin(0.) + pad2.SetTopMargin(0.) + pad2.SetBottomMargin(self.bottom_offset/(self.bottom_offset+self.ratio_height)) + pad2.SetTicks(1,2) + if self.logy: + pad2.SetLogy() + if self.logx: + pad2.SetLogx() + pad2.Draw() + pad2.cd() + + # Draw blank histos + blankname = 'myBlankHisto2_{}_PtBin{}-{}_{}'.format(pad, min_pt, max_pt, R) + myBlankHisto2 = ROOT.TH1F(blankname,blankname, 1, self.xmin, xmax-0.001) + myBlankHisto2.SetMinimum(self.ymin_ratio) + myBlankHisto2.SetMaximum(self.ymax_ratio) + myBlankHisto2.SetNdivisions(510, "y") + myBlankHisto2.SetNdivisions(505, "x") + myBlankHisto2.SetXTitle(self.xtitle) + myBlankHisto2.SetYTitle('#frac{Data}{Theory}') + myBlankHisto2.GetXaxis().SetTitleSize(0.15) + myBlankHisto2.GetXaxis().SetTitleOffset(0.7) + myBlankHisto2.GetXaxis().SetLabelSize(0.1) + myBlankHisto2.GetYaxis().SetTitleSize(0.12) + myBlankHisto2.GetYaxis().SetTitleOffset(1.) 
+ myBlankHisto2.GetYaxis().SetLabelSize(0.1) + myBlankHisto2.Draw() + self.blank_histo_list.append(myBlankHisto2) + + line = ROOT.TLine(self.xmin,1,xmax,1) + line.SetLineColor(1) + line.SetLineStyle(2) + line.Draw('same') + self.plot_list.append(line) + + if pad in [1]: + # Add y-axis numerical label text because ROOT is incapable + ymin_text = ROOT.TLatex(0.327, 0.42, '0.5') + ymin_text.SetNDC() + ymax_text = ROOT.TLatex(0.367, 0.89, '2') + ymax_text.SetNDC() + + ymin_text.SetTextSize(0.1) + ymin_text.Draw() + self.plot_list.append(ymin_text) + + ymax_text.SetTextSize(0.1) + ymax_text.Draw() + self.plot_list.append(ymax_text) + + # Draw ratio + for i, folding_label in enumerate(self.folding_labels): + + # Draw tgraph with sys uncertainty + self.g_ratio_dict[beta][i].SetFillColorAlpha(self.colors[i], 0.25) + self.g_ratio_dict[beta][i].SetLineColor(self.colors[i]) + self.g_ratio_dict[beta][i].SetLineWidth(3) + self.g_ratio_dict[beta][i].Draw('3 same') + + # Draw th1 with stat uncertainty + self.h_ratio_dict[beta][i].SetMarkerColorAlpha(self.colors[i], self.alpha) + self.h_ratio_dict[beta][i].SetLineColorAlpha(self.colors[i], self.alpha) + self.h_ratio_dict[beta][i].SetFillColorAlpha(self.colors[i], self.alpha) + self.h_ratio_dict[beta][i].SetLineColor(self.colors[i]) + self.h_ratio_dict[beta][i].SetLineWidth(2) + self.h_ratio_dict[beta][i].SetMarkerStyle(self.markers[i]) + self.h_ratio_dict[beta][i].SetMarkerSize(self.marker_size) + self.h_ratio_dict[beta][i].Draw('PE same') + + line_np_ratio = ROOT.TLine( + self.theta_np[min_pt][R][zcut][beta], self.ymin_ratio, + self.theta_np[min_pt][R][zcut][beta], self.ymax_ratio) + line_np_ratio.SetLineColor(self.colors[-1]) + line_np_ratio.SetLineStyle(2) + line_np_ratio.SetLineWidth(2) + line_np_ratio.Draw() + self.plot_list.append(line_np_ratio) + + #------------------------------------------------------------------------------------------- + # Construct ratio data/theory as TGraph + # Fills: + # - self.h_ratio_list with 
histogram of ratio with stat uncertainties + # - self.g_theory_dict with tgraph of ratio with sys uncertainties + #------------------------------------------------------------------------------------------- + def construct_ratio(self, h, h_sys, h_theory_cent, h_theory_min, h_theory_max, n, x, + xerrup, xerrdn, y, yerrup, yerrdn, R, zcut, beta, i, pad): + + grooming_label = "zcut%s_B%s" % (str(zcut).replace('.', ''), str(beta)) + + # Construct central value + h_trim = self.trim_data(h, h_theory_cent) + h_ratio = h_trim.Clone() + h_ratio.SetName('{}_{}_{}_{}'.format(h_ratio.GetName(), R, grooming_label, pad)) + h_ratio.SetDirectory(0) + h_ratio.Divide(h_theory_cent) + self.plot_list.append(h_ratio) + self.h_ratio_dict[beta].append(h_ratio) + y_ratio = np.array([h_ratio.GetBinContent(i) for i in range(1, h_trim.GetNbinsX()+1)]) + + # Construct systematic uncertainties: combine data and theory uncertainties + + # Get relative systematic from data + y_data = np.array([h_trim.GetBinContent(i) for i in range(1, h_trim.GetNbinsX()+1)]) + h_sys_trim = self.trim_data(h_sys, h_theory_cent) + y_sys_data = np.array([h_sys_trim.GetBinError(i) for i in range(1, h_trim.GetNbinsX()+1)]) + y_sys_data_relative = np.divide(y_sys_data, y_data) + + # Get relative systematics from theory + yerr_up_relative = np.divide(yerrup, y) + yerr_dn_relative = np.divide(yerrdn, y) + + # Trim according to theta_g cut + #if self.thetag_cut != 0: + # yerr_up_relative = [yerr_up_relative[i] for i in range(len(yerr_up_relative)) \ + # if len(yerr_up_relative)-len(y_data) <= i] + # yerr_dn_relative = [yerr_dn_relative[i] for i in range(len(yerr_dn_relative)) \ + # if len(yerr_dn_relative)-len(y_data) <= i] + + # Combine systematics in quadrature + y_sys_total_up_relative = np.sqrt( np.square(y_sys_data_relative) + \ + np.square(yerr_up_relative)) + y_sys_total_dn_relative = np.sqrt( np.square(y_sys_data_relative) + \ + np.square(yerr_dn_relative)) + + y_sys_total_up = np.multiply(y_sys_total_up_relative, 
y_ratio) + y_sys_total_dn = np.multiply(y_sys_total_dn_relative, y_ratio) + + # Note: invert direction of asymmetric uncertainty + g_ratio = ROOT.TGraphAsymmErrors(n, x, y_ratio, xerrdn, xerrup, + y_sys_total_up, y_sys_total_dn) + g_ratio.SetName('g_ratio_{}_{}'.format(i, grooming_label)) + self.g_ratio_dict[beta].append(g_ratio) + + #------------------------------------------------------------------------------------------- + # Rebin theory histogram according to data binning + # Set statistical uncertainties to 0 (since we will neglect them) + #------------------------------------------------------------------------------------------- + def rebin_theory(self, h_theory, h): + + xbins = array('d', [h.GetBinLowEdge(bi) for bi in range(1, h.GetNbinsX()+2) \ + if h.GetBinLowEdge(bi) >= self.thetag_cut]) + n = len(xbins) - 1 + + for bi in range(1, h_theory.GetNbinsX()+2): + # Undo scaling by bin width before rebinning + h_theory.SetBinContent(bi, h_theory.GetBinContent(bi) * h_theory.GetBinWidth(bi)) + + h_theory_rebinned = h_theory.Rebin(n, h_theory.GetName()+'_rebinned', xbins) + h_theory_rebinned.SetDirectory(0) + h_theory_rebinned.Scale(1., 'width') + + for bi in range(1, n+2): + h_theory_rebinned.SetBinError(bi, 0) + + return h_theory_rebinned + + #------------------------------------------------------------------------------------------- + # Rebin data histogram according to theory binning + # Ensures that theta_g cut does not affect ratio plots + #------------------------------------------------------------------------------------------- + def trim_data(self, h, h_theory): + + xbins = array('d', [h_theory.GetBinLowEdge(bi) \ + for bi in range(1, h_theory.GetNbinsX()+2)]) + n = len(xbins) - 1 + h_trimmed = h.Rebin(n, h.GetName()+'_trimmed', xbins) + return h_trimmed + + #------------------------------------------------------------------------------------------- + # Remove bins from histogram that have negative edges for plotting purposes + 
#------------------------------------------------------------------------------------------- + def remove_negative_bin_edges(self, h): + + n = h.GetNbinsX() + xbins = array('d', [h.GetBinLowEdge(bi) for bi in range(1, n+2)]) + + while xbins[0] < 0: + xbins = xbins[1:] + n -= 1 + + h_rebinned = h.Rebin(n, h.GetName()+'_negrm', xbins) + h_rebinned.SetDirectory(0) + + return h_rebinned + + #------------------------------------------------------------------------------------------- + # Scale vertical amplitude of histogram, for visualization + #------------------------------------------------------------------------------------------- + def scale_histogram_for_visualization(self, h, R, min_pt, zcut, beta): + + scale_factor = 1. + ''' Can implement formatting for theta_g later + if groomed: + if R == 0.2: + if min_pt == 20: + if alpha == '3': + scale_factor = 0.1 + #elif min_pt == 40: + #elif R == 0.4: + else: + if R == 0.2: + if min_pt == 20: + if alpha == '2': + scale_factor = 0.25 + elif alpha == '3': + scale_factor = 0.02 + elif min_pt == 40: + if alpha == '2': + scale_factor = 0.45 + elif alpha == '3': + scale_factor = 0.06 + elif min_pt == 60: + if alpha == '2': + scale_factor = 0.3 + elif alpha == '3': + scale_factor = 0.12 + elif min_pt == 80: + if alpha == '2': + scale_factor = 0.42 + if alpha == '3': + scale_factor = 0.035 + elif R == 0.4: + if min_pt == 20: + if alpha == '2': + scale_factor = 0.45 + elif alpha == '3': + scale_factor = 0.2 + elif min_pt == 40: + if alpha == '2': + scale_factor = 0.45 + elif alpha == '3': + scale_factor = 0.25 + elif min_pt == 60: + if alpha == '2': + scale_factor = 0.65 + elif alpha == '3': + scale_factor = 0.27 + elif min_pt == 80: + if alpha == '2': + scale_factor = 0.33 + elif alpha == '3': + scale_factor = 0.15 + ''' + + h.Scale(scale_factor) + + if math.isclose(scale_factor, 1.): + plot_label = '' + else: + plot_label = ' (#times{})'.format(scale_factor) + + return plot_label + + 
#------------------------------------------------------------------------------------------- + # Set legend parameters + #------------------------------------------------------------------------------------------- + def setupLegend(self, leg, textSize): + + leg.SetTextFont(42); + leg.SetBorderSize(0); + leg.SetFillStyle(0); + leg.SetFillColor(0); + leg.SetMargin(0.25); + leg.SetTextSize(textSize); + leg.SetEntrySeparation(0.5); + + #--------------------------------------------------------------- + # Remove periods from a label + #--------------------------------------------------------------- + def remove_periods(self, text): + + string = str(text) + return string.replace('.', '') + + #------------------------------------------------------------------------------------------- + def setOptions(self): + + font = 42 + + ROOT.gStyle.SetFrameBorderMode(0) + ROOT.gStyle.SetFrameFillColor(0) + ROOT.gStyle.SetCanvasBorderMode(0) + ROOT.gStyle.SetPadBorderMode(0) + ROOT.gStyle.SetPadColor(10) + ROOT.gStyle.SetCanvasColor(10) + ROOT.gStyle.SetTitleFillColor(10) + ROOT.gStyle.SetTitleBorderSize(1) + ROOT.gStyle.SetStatColor(10) + ROOT.gStyle.SetStatBorderSize(1) + ROOT.gStyle.SetLegendBorderSize(1) + + ROOT.gStyle.SetDrawBorder(0) + ROOT.gStyle.SetTextFont(font) + ROOT.gStyle.SetStatFont(font) + ROOT.gStyle.SetStatFontSize(0.05) + ROOT.gStyle.SetStatX(0.97) + ROOT.gStyle.SetStatY(0.98) + ROOT.gStyle.SetStatH(0.03) + ROOT.gStyle.SetStatW(0.3) + ROOT.gStyle.SetTickLength(0.02,"y") + ROOT.gStyle.SetEndErrorSize(3) + ROOT.gStyle.SetLabelSize(0.05,"xyz") + ROOT.gStyle.SetLabelFont(font,"xyz") + ROOT.gStyle.SetLabelOffset(0.01,"xyz") + ROOT.gStyle.SetTitleFont(font,"xyz") + ROOT.gStyle.SetTitleOffset(1.2,"xyz") + ROOT.gStyle.SetTitleSize(0.045,"xyz") + ROOT.gStyle.SetMarkerSize(1) + ROOT.gStyle.SetPalette(1) + + ROOT.gStyle.SetOptTitle(0) + ROOT.gStyle.SetOptStat(0) + ROOT.gStyle.SetOptFit(0) + 
+#------------------------------------------------------------------------------------------- +#------------------------------------------------------------------------------------------- +#------------------------------------------------------------------------------------------- +if __name__ == '__main__': + print('Executing plot_angularity.py...') + print('') + + # Define arguments + parser = argparse.ArgumentParser(description='Plot angularity') + parser.add_argument( + '-i', + '--inputDir', + action='store', + type=str, + metavar='inputDir', + default='.', + help='input directory containing ROOT file' + ) + parser.add_argument( + '-o', + '--outputDir', + action='store', + type=str, + metavar='outputDir', + default='.', + help='Output directory for output to be written to' + ) + + # Parse the arguments + args = parser.parse_args() + + analysis = PlotAngularityFigures(input_dir=args.inputDir, output_dir=args.outputDir) + analysis.plot_results() diff --git a/pyjetty/alice_analysis/analysis/user/james/plotting_utils_base.py b/pyjetty/alice_analysis/analysis/user/james/plotting_utils_base.py index 4f9d4a5a1..e37d31522 100755 --- a/pyjetty/alice_analysis/analysis/user/james/plotting_utils_base.py +++ b/pyjetty/alice_analysis/analysis/user/james/plotting_utils_base.py @@ -62,16 +62,16 @@ def __init__(self, output_dir = '.', config_file = '', R_max = None, thermal = F elif recluster_alg == 'AKT': self.reclustering_algorithm = 'anti-#it{k}_{T}' - main_data = config['main_data'] - main_response = config['main_response'] + self.main_data = config['main_data'] + self.main_response = config['main_response'] if thermal: - main_response = config['thermal_closure'] + self.main_response = config['thermal_closure'] - if os.path.exists(main_data): - self.fData = ROOT.TFile(main_data, 'READ') + if os.path.exists(self.main_data): + self.fData = ROOT.TFile(self.main_data, 'READ') else: self.fData = None - self.fMC = ROOT.TFile(main_response, 'READ') + self.fMC = 
ROOT.TFile(self.main_response, 'READ') if self.R_max: self.suffix = '_Rmax{}'.format(self.R_max) @@ -81,8 +81,10 @@ def __init__(self, output_dir = '.', config_file = '', R_max = None, thermal = F #self.ColorArray = [ROOT.kBlue-4, ROOT.kAzure+7, ROOT.kCyan-2, ROOT.kViolet-8, # ROOT.kBlue-6, ROOT.kGreen+3, ROOT.kPink-4, ROOT.kRed-4, # ROOT.kOrange-3] - self.ColorArray = [ROOT.kViolet-8, ROOT.kAzure-4, ROOT.kTeal-8, ROOT.kOrange+6, ROOT.kOrange-3, ROOT.kRed-7, ROOT.kPink+1, ROOT.kCyan-2, ROOT.kGray, ROOT.kBlue-4, ROOT.kAzure+7, ROOT.kBlue-6, 1] - + self.ColorArray = [ROOT.kViolet-8, ROOT.kAzure-4, ROOT.kTeal-8, ROOT.kOrange+6, + ROOT.kOrange-3, ROOT.kRed-7, ROOT.kPink+1, ROOT.kCyan-2, + ROOT.kGray, ROOT.kBlue-4, ROOT.kAzure+7, ROOT.kBlue-6, 1] + self.MarkerArray = [20, 21, 22, 23, 34, 33, 24, 25, 26, 32, 27, 28, 42] self.OpenMarkerArray = [24, 25, 26, 32, 27, 28, 42] @@ -103,7 +105,7 @@ def plot_DeltaR(self, jetR, jet_matching_distance): ROOT.gPad.SetRightMargin(0.15) c.SetLogz() - h.GetXaxis().SetTitle('#it{p}_{T,det}^{ch jet}') + h.GetXaxis().SetTitle('#it{p}_{T,det}^{ch jet} (GeV/#it{c})') h.GetYaxis().SetTitle('#DeltaR_{match}') x_max = 200. @@ -137,7 +139,7 @@ def plot_JES(self, jetR): if not histDeltaJES: name = 'hJES_R{}{}{}'.format(jetR, self.suffix, self.scaled_suffix) histDeltaJES = self.fMC.Get(name) - histDeltaJES.GetXaxis().SetTitle("#it{p}_{T}^{gen}") + histDeltaJES.GetXaxis().SetTitle("#it{p}_{T}^{gen} (GeV/#it{c})") histDeltaJES.GetYaxis().SetTitle("#frac{#it{p}_{T}^{det} - #it{p}_{T}^{gen}}{#it{p}_{T}^{gen}}") histDeltaJES.GetXaxis().SetRangeUser(0., 200.) 
outputFilename = os.path.join(self.output_dir, "histDeltaJES_R{}.pdf".format(self.remove_periods(jetR))) @@ -163,8 +165,9 @@ def plot_JES_proj(self, jetR, pt_bins): cJES = ROOT.TCanvas('cJES','cJES: hist',600,450) cJES.cd() + cJES.SetLeftMargin(0.2) cJES.SetBottomMargin(0.2) - + leg = ROOT.TLegend(0.55,0.55,0.88,0.85, '') leg.SetFillColor(10) leg.SetBorderSize(0) @@ -172,32 +175,42 @@ def plot_JES_proj(self, jetR, pt_bins): leg.SetTextSize(0.04) # Loop through pt slices, and plot final result for each 1D theta_g distribution + projections = [] + max = 0 for i in range(0, len(pt_bins) - 1): min_pt_truth = pt_bins[i] max_pt_truth = pt_bins[i+1] - + hJESProj = self.getJESshiftProj(name, 'hJESproj{}'.format(i), min_pt_truth, max_pt_truth) hJESProj.SetMarkerStyle(20) hJESProj.SetMarkerColor(self.ColorArray[i]) hJESProj.SetLineColor(self.ColorArray[i]) - + + projections.append(hJESProj) + new_max = hJESProj.GetMaximum() + if new_max > max: + max = new_max + + leg.AddEntry(hJESProj, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV/#it{{c}}'.format(min_pt_truth, max_pt_truth), 'P') + + for i in range(0, len(pt_bins) - 1): + hJESProj = projections[i] + if i == 0: - + hJESProj.GetXaxis().SetTitleOffset(1.6); hJESProj.GetYaxis().SetTitle('Probability density') hJESProj.GetXaxis().SetTitle('#frac{#it{p}_{T}^{det} - #it{p}_{T}^{gen}}{#it{p}_{T}^{gen}}') - - hJESProj.GetYaxis().SetRangeUser(0, 1.3*hJESProj.GetMaximum()) + + hJESProj.GetYaxis().SetRangeUser(0, 1.3*max) hJESProj.DrawCopy('P E') - + else: - + hJESProj.DrawCopy('P E same') - - leg.AddEntry(hJESProj, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV'.format(min_pt_truth, max_pt_truth), 'P') - + leg.Draw('same') - + outputFilename = os.path.join(self.output_dir, 'jet/histDeltaJESproj_R{}.pdf'.format(self.remove_periods(jetR))) cJES.SaveAs(outputFilename) cJES.Close() @@ -223,8 +236,11 @@ def getJESshiftProj(self, name, label, minPt, maxPt): def plotJER(self, jetR, obs_label): # (pt-det, pt-truth, theta_g-det, theta_g-truth) - name = 
'hResponse_JetPt_{}_R{}_{}{}{}'.format(self.observable, jetR, obs_label, self.suffix, self.scaled_suffix) + name = 'hResponse_JetPt_{}_R{}_{}{}{}'.format( + self.observable, jetR, obs_label, self.suffix, self.scaled_suffix).replace("__", "_") hRM_4d = self.fMC.Get(name) + if not hRM_4d: + raise AttributeError("%s not found in file %s" % (name, self.main_response)) hRM = hRM_4d.Projection(1,0) hRM.SetName('hResponse_JetPt_{}_R{}_{}_Proj'.format(self.observable, jetR, obs_label)) @@ -235,8 +251,9 @@ def plotJER(self, jetR, obs_label): # Create histo to be used to fill JER values nBins = histPtGenProf.GetNbinsX() - histJER = ROOT.TH1D('histJER_R{}_{}'.format(jetR, obs_label), 'histJER_R{}_{}'.format(jetR, obs_label), nBins, 0., 300.) # same binning for pT^gen as in task - + ptbins = array('d', [histPtGenProf.GetBinLowEdge(i) for i in range(1, nBins+2)]) + histJER = ROOT.TH1D('histJER_R{}_{}'.format(jetR, obs_label), 'histJER_R{}_{}'.format(jetR, obs_label), nBins, ptbins) # same binning for pT^gen as in task + # Loop through the bins, and fill the JER for i in range(0,nBins+1): sigma = histPtGenProf.GetBinError(i) @@ -245,9 +262,9 @@ def plotJER(self, jetR, obs_label): histJER.SetBinContent(i, JER) histJER.GetYaxis().SetTitle('#frac{#sigma(#it{p}_{T}^{gen})}{#it{p}_{T}^{gen}}') - histJER.GetXaxis().SetTitle('#it{p}_{T}^{gen}') + histJER.GetXaxis().SetTitle('#it{p}_{T}^{gen} (GeV/#it{c})') histJER.GetYaxis().SetRangeUser(-0.01, 0.5) - histJER.GetXaxis().SetRangeUser(5., 100.) + histJER.GetXaxis().SetRangeUser(5., 150.) 
outputFilename = os.path.join(self.output_dir, 'jet/histJER_R{}.pdf'.format(self.remove_periods(jetR))) histJER.SetMarkerStyle(21) histJER.SetMarkerColor(2) @@ -260,32 +277,49 @@ def plot_jet_reco_efficiency(self, jetR, obs_label): # For each pT^gen, compute the fraction of matched pT^gen # First, get the pT^gen spectrum - name = 'h_{}_JetPt_Truth_R{}_{}{}'.format(self.observable, jetR, obs_label, self.scaled_suffix) - histPtGen = self.fMC.Get(name).ProjectionX() - + name = 'h_{}_JetPt_Truth_R{}_{}{}'.format( + self.observable, jetR, obs_label, self.scaled_suffix).replace("_Scaled", "Scaled") + histPtGen = self.fMC.Get(name) + if not histPtGen: + raise AttributeError("%s not found in file %s" % (name, self.main_response)) + histPtGen = histPtGen.ProjectionX() + hpg_xbins = [histPtGen.GetXaxis().GetBinLowEdge(i) for i in \ + range(1, histPtGen.GetXaxis().GetNbins()+2)] + # Then, get the pT^gen spectrum for matched jets - name = 'hResponse_JetPt_{}_R{}_{}{}{}'.format(self.observable, jetR, obs_label, self.suffix, self.scaled_suffix) + name = 'hResponse_JetPt_{}_R{}_{}{}{}'.format( + self.observable, jetR, obs_label, self.suffix, self.scaled_suffix).replace("__", "_") hRM_4d = self.fMC.Get(name) + if not hRM_4d: + raise AttributeError("%s not found in file %s" % (name, self.main_response)) hRM = hRM_4d.Projection(1,0) hRM.SetName('hResponse_JetPt_{}_R{}_{}_Proj'.format(self.observable, jetR, obs_label)) histPtGenMatched = hRM.ProjectionY("_py",1,hRM.GetNbinsX()) #avoid under and overflow bins histPtGenMatched.SetName('histPtGenMatched_R{}_{}'.format(jetR, obs_label)) - + hpgm_xbins = [histPtGenMatched.GetXaxis().GetBinLowEdge(i) for i in \ + range(1, histPtGenMatched.GetXaxis().GetNbins()+2)] + + # Make sure before division that you have the same pT bins + if hpg_xbins != hpgm_xbins: + histPtGen = histPtGen.Rebin(len(hpgm_xbins) - 1, histPtGen.GetName() + "_rebin", + array('d', hpgm_xbins)) + # Compute the ratio histEfficiency = histPtGenMatched.Clone() 
histEfficiency.SetName('histEfficiency_{}'.format(name)) histEfficiency.Divide(histPtGenMatched, histPtGen, 1., 1., 'B') - - histEfficiency.GetXaxis().SetTitle('#it{p}_{T}^{gen}') + + histEfficiency.GetXaxis().SetTitle('#it{p}_{T}^{gen} (GeV/#it{c})') histEfficiency.GetYaxis().SetTitle('Efficiency') - histEfficiency.GetXaxis().SetRangeUser(0., 100.) + histEfficiency.GetXaxis().SetRangeUser(0., 150.) histEfficiency.GetYaxis().SetRangeUser(0., 1.2) histEfficiency.SetMarkerStyle(21) histEfficiency.SetMarkerColor(1) histEfficiency.SetMarkerSize(3) - outputFilename = os.path.join(self.output_dir, 'jet/hJetRecoEfficiency_R{}.pdf'.format(self.remove_periods(jetR))) + outputFilename = os.path.join( + self.output_dir, 'jet/hJetRecoEfficiency_R{}.pdf'.format(self.remove_periods(jetR))) self.plot_hist(histEfficiency, outputFilename) - + #--------------------------------------------------------------- def plot_obs_resolution(self, jetR, obs_label, xtitle, pt_bins): @@ -308,9 +342,12 @@ def plot_obs_resolution(self, jetR, obs_label, xtitle, pt_bins): self.setup_legend(myLegend,0.035) # (pt-det, pt-truth, theta_g-det, theta_g-truth) - name = 'hResponse_JetPt_{}_R{}_{}{}{}'.format(self.observable, jetR, obs_label, self.suffix, self.scaled_suffix) + name = 'hResponse_JetPt_{}_R{}_{}{}{}'.format( + self.observable, jetR, obs_label, self.suffix, self.scaled_suffix).replace("__", '_') hRM_4d = self.fMC.Get(name) - + if not hRM_4d: + raise AttributeError("%s not found in file %s" % (name, self.main_response)) + h_list = [] # Store hists in a list, since otherwise it seems I lose the marker information # (removed from memory?) 
@@ -322,12 +359,12 @@ def plot_obs_resolution(self, jetR, obs_label, xtitle, pt_bins): hRM_4d_clone = hRM_4d.Clone() hRM_4d_clone.SetName('{}_{}'.format(hRM_4d_clone.GetName(), i)) hResolution = self.get_resolution(hRM_4d_clone, jetR, obs_label, min_pt_truth, max_pt_truth, 'hResolution_{}'.format(i)) - + hResolution.SetMarkerColor(self.ColorArray[i]) hResolution.SetMarkerStyle(21) hResolution.SetLineColor(self.ColorArray[i]) hResolution.DrawCopy('P same') - myLegend.AddEntry(hResolution, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV'.format(min_pt_truth, max_pt_truth), 'P') + myLegend.AddEntry(hResolution, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV/#it{{c}}'.format(min_pt_truth, max_pt_truth), 'P') h_list.append(hResolution) myLegend.Draw('same') @@ -369,8 +406,9 @@ def get_resolution(self, hRM_4d, jetR, obs_label, minPt, maxPt, label): #--------------------------------------------------------------- def plot_obs_residual_pt(self, jetR, obs_label, xtitle, pt_bins): - name = 'hResidual_JetPt_{}_R{}_{}{}{}'.format(self.observable, jetR, obs_label, self.suffix, self.scaled_suffix) - + name = 'hResidual_JetPt_{}_R{}_{}{}{}'.format( + self.observable, jetR, obs_label, self.suffix, self.scaled_suffix).replace("__", '_') + c_residual = ROOT.TCanvas('c','c: hist',600,450) c_residual.cd() c_residual.SetBottomMargin(0.2) @@ -385,46 +423,51 @@ def plot_obs_residual_pt(self, jetR, obs_label, xtitle, pt_bins): for i in range(0, len(pt_bins) - 1): min_pt_truth = pt_bins[i] max_pt_truth = pt_bins[i+1] - - hResidual = self.get_residual_proj(name, 'hResidual{}'.format(i), min_pt_truth, max_pt_truth, option='pt') + + hResidual = self.get_residual_proj( + name, 'hResidual{}'.format(i), min_pt_truth, max_pt_truth, option='pt') hResidual.SetMarkerStyle(self.MarkerArray[i]) hResidual.SetMarkerColor(self.ColorArray[i]) hResidual.SetLineColor(self.ColorArray[i]) - + if i == 0: - + hResidual.GetXaxis().SetTitleOffset(1.6); hResidual.GetYaxis().SetTitle('Probability density') 
hResidual.GetYaxis().SetRangeUser(0, 2.*hResidual.GetMaximum()) hResidual.DrawCopy('P E') - + else: hResidual.DrawCopy('P E same') - - leg.AddEntry(hResidual, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV'.format(min_pt_truth, max_pt_truth), 'P') + + leg.AddEntry( + hResidual, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV/#it{{c}}'.format(min_pt_truth, max_pt_truth), 'P') leg.Draw('same') - - outputFilename = os.path.join(self.output_dir, 'residual_pt/hResidual_R{}_{}.pdf'.format(self.remove_periods(jetR), obs_label)) + + outputFilename = os.path.join( + self.output_dir, 'residual_pt/hResidual_R%s_%s.pdf' % \ + (str(jetR).replace('.',''), obs_label)) c_residual.SaveAs(outputFilename) c_residual.Close() #--------------------------------------------------------------- def plot_obs_residual_obs(self, jetR, obs_label, xtitle): - name = 'hResidual_JetPt_{}_R{}_{}{}{}'.format(self.observable, jetR, obs_label, self.suffix, self.scaled_suffix) - + name = 'hResidual_JetPt_{}_R{}_{}{}{}'.format( + self.observable, jetR, obs_label, self.suffix, self.scaled_suffix).replace("__", '_') + c_residual = ROOT.TCanvas('c','c: hist',600,450) c_residual.cd() c_residual.SetBottomMargin(0.2) - + leg = ROOT.TLegend(0.55,0.55,0.88,0.85, '') leg.SetFillColor(10) leg.SetBorderSize(0) leg.SetFillStyle(1) leg.SetTextSize(0.035) - + # Loop through pt slices, and plot final residual for each 1D distribution min_pt = 80 max_pt = 100 @@ -435,53 +478,81 @@ def plot_obs_residual_obs(self, jetR, obs_label, xtitle): obs_true_list = [0.2, 0.3, 0.4, 0.5] elif 'z_{r}' in xtitle: obs_true_list = [0., 0.2, 0.4, 0.6, 0.8, 1.] 
- + elif 'lambda' in xtitle: + if "1" in obs_label: # alpha = 1, 1.5 + obs_true_list = [0, 0.05, 0.1, 0.2, 0.4, 0.7] + else: # alpha = 2, 3 + obs_true_list = [0, 0.05, 0.1, 0.2, 0.5] + elif "{m}_{jet}" in xtitle: + obs_true_list = [1, 5, 8, 12, 20] + + max = 0 + residuals = [] for i in range(0, len(obs_true_list) - 1): min_obs_truth = obs_true_list[i] max_obs_truth = obs_true_list[i+1] - - hResidual = self.get_residual_proj(name, 'hResidual{}'.format(i), min_obs_truth, max_obs_truth, option='obs', min_pt=min_pt, max_pt=max_pt) + + hResidual = self.get_residual_proj( + name, 'hResidual' + str(i), min_obs_truth, max_obs_truth, + option='obs', min_pt=min_pt, max_pt=max_pt) hResidual.SetMarkerStyle(self.MarkerArray[i]) hResidual.SetMarkerColor(self.ColorArray[i]) hResidual.SetLineColor(0) if self.thermal: hResidual.SetMarkerStyle(self.OpenMarkerArray[i]) + residuals.append(hResidual) + new_max = hResidual.GetMaximum() + if new_max > max: + max = new_max + + leg.AddEntry(hResidual, '{} = {}-{}'.format('{}^{{{}}}'.format(xtitle, 'truth'), + min_obs_truth, max_obs_truth), 'P') + + for i in range(0, len(obs_true_list) - 1): + hResidual = residuals[i] + if i == 0: - + hResidual.GetXaxis().SetTitleOffset(1.6); hResidual.GetYaxis().SetTitle('Probability density') - hResidual.GetYaxis().SetRangeUser(0, 2.*hResidual.GetMaximum()) + hResidual.GetYaxis().SetRangeUser(0, 2.*max) hResidual.DrawCopy('P E') - + else: hResidual.DrawCopy('P E same') - leg.AddEntry(hResidual, '{} = {}-{}'.format('{}^{{{}}}'.format(xtitle, 'truth'), min_obs_truth, max_obs_truth), 'P') - leg.Draw('same') - + text_latex = ROOT.TLatex() text_latex.SetNDC() - text = '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV'.format(min_pt, max_pt) + text = '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV/#it{{c}}'.format(min_pt, max_pt) text_latex.DrawLatex(0.2, 0.8, text) - - outputFilename = os.path.join(self.output_dir, 'residual_obs/hResidual_R{}_{}.pdf'.format(self.remove_periods(jetR), obs_label)) + + outputFilename = os.path.join( + 
self.output_dir, 'residual_obs/hResidual_R{}_{}.pdf'.format( + str(jetR).replace('.', ''), obs_label)) c_residual.SaveAs(outputFilename) c_residual.Close() #--------------------------------------------------------------- # Get residual for a fixed pT-gen def get_residual_proj(self, name, label, min, max, option='pt', min_pt=80., max_pt=100.): - + h_residual_pt = self.fMC.Get(name) + if not h_residual_pt: + raise AttributeError("%s not found in file %s" % (name, self.main_response)) h_residual_pt.SetName('{}_{}'.format(h_residual_pt.GetName(), label)) - + if option == 'pt': h_residual_pt.GetXaxis().SetRangeUser(min, max) elif option == 'obs': h_residual_pt.GetXaxis().SetRangeUser(min_pt, max_pt) + # Make sure that we do not overset the range + top_edge = h_residual_pt.GetYaxis().GetBinUpEdge(h_residual_pt.GetYaxis().GetNbins()) + max = max if max <= top_edge else top_edge + min = min if min <= max else max h_residual_pt.GetYaxis().SetRangeUser(min, max) h_residual_pt.GetZaxis().SetRangeUser(-0.5, 0.5) h = h_residual_pt.Project3D('z') @@ -493,42 +564,56 @@ def get_residual_proj(self, name, label, min, max, option='pt', min_pt=80., max_ return h #--------------------------------------------------------------- - def plot_obs_projections(self, jetR, obs_label, obs_setting, grooming_setting, xtitle, pt_bins): + def plot_obs_projections(self, jetR, obs_label, obs_setting, grooming_setting, + xtitle, pt_bins): if not self.fData: return - + # (pt-det, pt-truth, obs-det, obs-truth) - name = 'hResponse_JetPt_{}_R{}_{}{}{}'.format(self.observable, jetR, obs_label, self.suffix, self.scaled_suffix) + name = 'hResponse_JetPt_{}_R{}_{}{}{}'.format( + self.observable, jetR, obs_label, self.suffix, self.scaled_suffix).replace("__", '_') hRM_obs = self.fMC.Get(name) - if hRM_obs.GetSumw2() == 0: + if not hRM_obs: + raise AttributeError("%s not found in file %s" % (name, self.main_response)) + if not hRM_obs.GetSumw2(): hRM_obs.Sumw2() - - name = 
'h_{}_JetPt_R{}_{}{}'.format(self.observable, jetR, obs_label, self.suffix) + + name = 'h_{}_JetPt_R{}_{}{}'.format( + self.observable, jetR, obs_label, self.suffix).replace("__", '_') hObs_JetPt = self.fData.Get(name) - if hObs_JetPt.GetSumw2() == 0: + if not hObs_JetPt: + raise AttributeError("%s not found in file %s" % (name, self.main_data)) + if not hObs_JetPt.GetSumw2(): hObs_JetPt.Sumw2() # Plot 2D statistics in data self.plot2D_obs_statistics(hObs_JetPt.Clone(), jetR, obs_label) - + # Loop through pt slices, and plot: # (a) MC-det and MC-truth 1D distributions, for fixed pt-truth # (b) MC-det and data 1D distributions, for fixed pt-det for i in range(0, len(pt_bins) - 1): min_pt_truth = pt_bins[i] max_pt_truth = pt_bins[i+1] - - self.plot_obs_projection(hRM_obs, hObs_JetPt, jetR, obs_label, obs_setting, grooming_setting, xtitle, min_pt_truth, max_pt_truth, option='truth') - self.plot_obs_projection(hRM_obs, hObs_JetPt, jetR, obs_label, obs_setting, grooming_setting, xtitle, min_pt_truth, max_pt_truth, option='det') + + self.plot_obs_projection( + hRM_obs, hObs_JetPt, jetR, obs_label, obs_setting, grooming_setting, + xtitle, min_pt_truth, max_pt_truth, option='truth') + self.plot_obs_projection( + hRM_obs, hObs_JetPt, jetR, obs_label, obs_setting, grooming_setting, + xtitle, min_pt_truth, max_pt_truth, option='det') #--------------------------------------------------------------- def plot_obs_truth(self, jetR, obs_label, obs_setting, grooming_setting, xtitle, pt_bins): - - name = 'h_{}_JetPt_Truth_R{}_{}{}'.format(self.observable, jetR, obs_label, self.scaled_suffix) + + name = 'h_{}_JetPt_Truth_R{}_{}{}'.format( + self.observable, jetR, obs_label, self.scaled_suffix).replace("_Scaled", "Scaled") h2D = self.fMC.Get(name) - + if not h2D: + raise AttributeError("%s not found in file %s" % (name, self.main_response)) + for i in range(0, len(pt_bins) - 1): min_pt_truth = pt_bins[i] max_pt_truth = pt_bins[i+1] @@ -618,14 +703,30 @@ def plot2D_obs_statistics(self, 
hObs_JetPt, jetR, obs_label): c = ROOT.TCanvas('c','c: hist',600,450) c.cd() ROOT.gPad.SetLeftMargin(0.15) + ROOT.gPad.SetRightMargin(0.15) ROOT.gPad.SetBottomMargin(0.15) hObs_JetPt.SetMarkerSize(0.5) hObs_JetPt.GetYaxis().SetRangeUser(0, 1.) hObs_JetPt.GetXaxis().SetRangeUser(0, 100) - hObs_JetPt.RebinX(5) - hObs_JetPt.RebinY(5) - hObs_JetPt.Draw('text colz') + if self.observable != "ang": + hObs_JetPt.RebinX(5) + hObs_JetPt.RebinY(5) + h = hObs_JetPt + else: + ROOT.gPad.SetLogz(1) + # No need to plot 5-20 for now -- statistics mess up readability + xbins = [20, 40, 60, 80, 100, 150, 200] + ybins = [hObs_JetPt.GetYaxis().GetBinLowEdge(i) for i in range(1, hObs_JetPt.GetYaxis().GetNbins()+2)] + h = self.rebin_data( + hObs_JetPt, hObs_JetPt.GetName() + "_rebin", len(xbins) - 1, array('d', xbins), + len(ybins) - 1, array('d', ybins), move_underflow=False) + h.RebinY(5) + h.GetXaxis().SetTitle("#it{p}_{T}^{ch jet} (GeV/#it{c})") + alpha = obs_label.split("_")[0] + h.GetYaxis().SetTitle("#it{#lambda}_{#it{#alpha}=%s}^{#it{#kappa}=1}" % alpha) + + h.Draw('text colz') output_filename = os.path.join(self.output_dir, 'data/h2D_{}_statistics_R{}_{}.pdf'.format(self.observable, self.remove_periods(jetR), obs_label)) c.SaveAs(output_filename) @@ -634,10 +735,11 @@ def plot2D_obs_statistics(self, hObs_JetPt, jetR, obs_label): #--------------------------------------------------------------- # If option='truth', plot MC-truth and MC-det projections for fixed pt-true # If option='det', plot data and MC-det projections for fixed pt-det - def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, grooming_setting, xtitle, min_pt, max_pt, option='truth'): + def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, + grooming_setting, xtitle, min_pt, max_pt, option='truth'): + + ytitle = '#frac{1}{#it{#sigma}} #frac{d#it{#sigma}}{d%s}' % xtitle - ytitle = '#frac{{1}}{{N}} #frac{{dN}}{{d{}}}'.format(xtitle) - if self.observable == 'theta_g': 
rebin_val_mcdet = 5 rebin_val_mctruth = 5 @@ -654,13 +756,25 @@ def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, gro rebin_val_mcdet = 2 rebin_val_mctruth = 1 rebin_val_data = 5 - + elif self.observable == "ang": + rebin_val_mcdet = 2 + rebin_val_mctruth = 2 + rebin_val_data = 2 + elif self.observable == "mass": + rebin_val_mcdet = 2 + rebin_val_mctruth = 2 + rebin_val_data = 2 + if jetR == 0.2: + hRM.GetAxis(2).SetRangeUser(0, 16) + hRM.GetAxis(3).SetRangeUser(0, 16) + hObs_JetPt.GetYaxis().SetRangeUser(0, 16) + # Get RM, for a given pt cut if option == 'det': hRM.GetAxis(0).SetRangeUser(min_pt, max_pt) if option == 'truth': hRM.GetAxis(1).SetRangeUser(min_pt, max_pt) - + # Get histogram of observable at MC-det from RM hObs_det = hRM.Projection(2) hObs_det.SetName('hObs_det_{}'.format(obs_label)) @@ -684,7 +798,7 @@ def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, gro hObs_truth.Scale(1., 'width') if grooming_setting and 'sd' in grooming_setting: hObs_truth.GetXaxis().SetRange(0, hObs_truth.GetNbinsX()) - + # Get histogram of theta_g in data, for given pt-det cut if option == 'det': hObs_JetPt.GetXaxis().SetRangeUser(min_pt, max_pt) @@ -706,6 +820,10 @@ def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, gro myPad.SetTopMargin(0.07) myPad.SetRightMargin(0.04) myPad.SetBottomMargin(0.13) + logy = False + if self.observable == "ang" and grooming_setting and 'sd' in grooming_setting: + logy = True + myPad.SetLogy(logy) myPad.Draw() myPad.cd() @@ -716,8 +834,15 @@ def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, gro leg.SetTextSize(0.04) hObs_det.GetYaxis().SetTitleOffset(1.5) - hObs_det.SetMaximum(2.5*hObs_det.GetMaximum()) - hObs_det.SetMinimum(0.) 
+ if logy: + hObs_det.SetMaximum(1e4*hObs_det.GetMaximum()) + hObs_det.SetMinimum(1e-4) + else: + if self.observable == "ang": + hObs_det.SetMaximum(2*hObs_det.GetMaximum()) + else: + hObs_det.SetMaximum(2.5*hObs_det.GetMaximum()) + hObs_det.SetMinimum(0.) hObs_det.Draw('hist') leg.AddEntry(hObs_det, "MC det", "L") @@ -748,7 +873,8 @@ def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, gro text = str(min_pt) + ' < #it{p}_{T, ch jet}^{det} < ' + str(max_pt) + ' GeV/#it{c}' text_latex.DrawLatex(0.3, 0.73, text) - text = '#it{R} = ' + str(jetR) + ' | #it{{#eta}}_{{jet}}| < {:.1f}'.format(self.eta_max - jetR) + text = '#it{R} = ' + str(jetR) + \ + ' | #it{{#eta}}_{{jet}}| < {:.1f}'.format(self.eta_max - jetR) text_latex.DrawLatex(0.3, 0.67, text) subobs_label = self.formatted_subobs_label(self.observable) @@ -762,7 +888,9 @@ def plot_obs_projection(self, hRM, hObs_JetPt, jetR, obs_label, obs_setting, gro text = self.formatted_grooming_label(grooming_setting, verbose = not self.groomer_studies) text_latex.DrawLatex(0.3, 0.61-delta, text) - output_filename = os.path.join(self.output_dir, 'mc_projections_{}/h_{}_MC_R{}_{}_{}-{}.pdf'.format(option, self.observable, self.remove_periods(jetR), obs_label, min_pt, max_pt)) + output_filename = os.path.join( + self.output_dir, 'mc_projections_{}/h_{}_MC_R{}_{}_{}-{}.pdf'.format( + option, self.observable, self.remove_periods(jetR), obs_label, min_pt, max_pt)) c.SaveAs(output_filename) c.Close() @@ -784,10 +912,10 @@ def plot_delta_pt_RC(self, jetR, CS_label): hDeltaPt.GetYaxis().SetTitle('#frac{dN}{d#delta#it{p}_{T}}') if 'after' in CS_label: min = 0 - hDeltaPt.GetXaxis().SetTitle('#delta#it{p}_{T} #equiv #it{p}_{T}^{RC}') + hDeltaPt.GetXaxis().SetTitle('#delta#it{p}_{T} #equiv #it{p}_{T}^{RC} (GeV/#it{c})') else: min = -50 - hDeltaPt.GetXaxis().SetTitle('#delta#it{p}_{T} #equiv #it{p}_{T}^{RC} - #pi#rho#it{R}^{2}') + hDeltaPt.GetXaxis().SetTitle('#delta#it{p}_{T} #equiv #it{p}_{T}^{RC} - 
#pi#rho#it{R}^{2} (GeV/#it{c})') hDeltaPt.GetXaxis().SetRangeUser(min, 50) hDeltaPt.GetYaxis().SetRangeUser(10, 100*hDeltaPt.GetMaximum()) @@ -828,7 +956,7 @@ def plot_delta_pt_emb(self, jetR, pt_bins): hDeltaPt.GetXaxis().SetTitleOffset(1.6); hDeltaPt.GetYaxis().SetTitleOffset(1.6); - hDeltaPt.GetXaxis().SetTitle('#delta#it{p}_{T} #equiv #it{p}_{T,jet}^{combined} - #it{p}_{T,jet}^{pp-det}') + hDeltaPt.GetXaxis().SetTitle('#delta#it{p}_{T} #equiv #it{p}_{T,jet}^{combined} - #it{p}_{T,jet}^{pp-det} (GeV/#it{c})') hDeltaPt.GetYaxis().SetTitle('#frac{dN}{d#delta#it{p}_{T}}') hDeltaPt.GetXaxis().SetRangeUser(-50, 50) hDeltaPt.DrawCopy('P E') @@ -837,12 +965,13 @@ def plot_delta_pt_emb(self, jetR, pt_bins): hDeltaPt.DrawCopy('P E same') - leg.AddEntry(hDeltaPt, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV'.format(min_pt_truth, max_pt_truth), 'P') + leg.AddEntry(hDeltaPt, '#it{{p}}_{{T}}^{{gen}} = {}-{} GeV/#it{{c}}'.format(min_pt_truth, max_pt_truth), 'P') mean = hDeltaPt.GetMean() std_dev = hDeltaPt.GetStdDev() text = 'Mean: {:.2f}, #sigma: {:.2f}'.format(mean, std_dev) - leg.AddEntry(None, text, '') + #leg.AddEntry(None, text, '') + leg.AddEntry(hDeltaPt, text, '') leg.Draw('same') diff --git a/pyjetty/alice_analysis/analysis/user/james/run_fold_theory_theta_g.py b/pyjetty/alice_analysis/analysis/user/james/run_fold_theory_theta_g.py new file mode 100755 index 000000000..6b11cf918 --- /dev/null +++ b/pyjetty/alice_analysis/analysis/user/james/run_fold_theory_theta_g.py @@ -0,0 +1,317 @@ +#! 
/usr/bin/env python + +""" +Code to do theory folding in order to compare to the measured distributions +The class 'TheoryFolding' below inherits from the 'TheoryFolding' class in: +pyjetty/alice_analysis/analysis/user/substructure/run_fold_theory.py +reynier@lbl.gov +""" + +import sys +import os +import argparse +from array import * +import numpy as np +import ROOT +ROOT.gSystem.Load("$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so") +import yaml + +from pyjetty.alice_analysis.analysis.user.substructure import run_fold_theory + +# Load pyjetty ROOT utils +ROOT.gSystem.Load('libpyjetty_rutil') + +# Prevent ROOT from stealing focus when plotting +ROOT.gROOT.SetBatch(True) + +################################################################ +################################################################ +################################################################ +class TheoryFolding(run_fold_theory.TheoryFolding): + + def load_theory_curves(self): + + self.theory_scale_vars = {} + + # Loop over each jet R specified in the config file + for jetR in self.jetR_list: + scale_var = [] + + # Loop through subconfigurations to fold (e.g. in the jet-axis analysis there Standard_WTA, Standard_SD_1, ...) 
+ for i, obs_setting in enumerate(self.obs_settings): + grooming_setting = self.grooming_settings[i] # grooming parameters + zcut = None; beta = None + if grooming_setting and type(obs_setting) == dict and 'zcut' in obs_setting.keys() \ + and 'beta' in obs_setting.keys(): + zcut = obs_setting["zcut"] + beta = obs_setting["beta"] + #label = ("zcut%s_B%s" % (str(zcut), str(beta))).replace('.','') + label = self.create_label(jetR, obs_setting, grooming_setting) + else: # Not SD grooming + continue + + pt_bins = array('d', self.theory_pt_bins) + + if self.theory_obs_bins: + obs_bins = array('d', self.theory_obs_bins) # bins which we want to have in the result + else: + obs_bins = array('d', getattr(self, 'binning_' + label)) + + # Add bin for underflow value (tagging fraction) + if grooming_setting and self.use_tagging_fraction: + obs_bins = np.insert(obs_bins, 0, -0.001) + + obs_width = np.subtract(obs_bins[1:], obs_bins[:-1]) + + # ----------------------------------------------------- + # Create histograms where theory curves will be stored + th_hists_no_scaling = [] # Basically a copy of the theory calculations, but binned + th_hists = [] # Histograms that will actually be used in the folding + hist_names = [] + + # ----------------------------------------------------- + # opening theory file by file and fill histograms + th_sub_dir = "tg_bt%s_zc%s" % (str(beta), str(zcut).replace('.','')) + th_path = os.path.join(self.theory_dir, th_sub_dir) + print('reading from files in:', th_path) + + # loop over pT bins + for p, pt in enumerate(pt_bins[:-1]): + pt_min = self.theory_pt_bins[p] + pt_max = self.theory_pt_bins[p+1] + + # Get scale factor for this pT bin. + # This reverses the self-normalization of 1/sigma for correct + # pT scaling when doing projections onto the y-axis. 
+ scale_f = self.pt_scale_factor_jetR(pt, pt_bins[p+1], jetR) + + # load theory file, grab the data, and fill histograms with it + th_file = 'R_%s_pT_%i-%i.dat' % (str(jetR).replace('.',''), int(pt_min), int(pt_max)) + th_file = os.path.join(th_path, th_file) + + # ------------------------------------------------------------------------------------------------------------ + # Load data from theory file + with open(th_file) as f: + + lines = [line for line in f.read().split('\n') if line and line[0] != '#'] + x_val = [float(line.split()[0]) for line in lines] + + n_scale_variations = len(lines[0].split())-1 # number of scale variations + + # loop over scale variations and fill histograms + for sv in range(0, n_scale_variations): + y_val_n = [float(line.split()[sv+1]) for line in lines] + + # Interpolate the given values and return the value at the requested bin center + y_val_bin_ctr = self.interpolate_values_linear(x_val, y_val_n, obs_bins) + # Remove negative numbers + y_val_bin_ctr = [0 if val < 0 else val for val in y_val_bin_ctr] + + if p == 0: + hist_name = 'h2_input_%s_obs_pT_%s_sv%i' % (self.observable, label, sv) + hist_name_no_scaling = hist_name + '_no_scaling' + + th_hist = ROOT.TH2D(hist_name, ';p_{T}^{jet};%s' % (self.observable), + len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + + th_hist_no_scaling = ROOT.TH2D(hist_name_no_scaling, ';p_{T}^{jet};%s' % (self.observable), + len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + + th_hists.append(th_hist) + hist_names.append(hist_name) + th_hists_no_scaling.append(th_hist_no_scaling) + + # Save content into histogram before any scaling has been applied + # (to compare to the theory curves and make sure everything went fine) + for ob in range(0, len(obs_bins)-1): + th_hists_no_scaling[sv].SetBinContent(p+1, ob+1, y_val_bin_ctr[ob]) + + # Multiply by bin width and scale with pT-dependent factor + y_val_bin_ctr = np.multiply(y_val_bin_ctr, obs_width) + integral_y_val_bin_ctr = sum(y_val_bin_ctr) + 
y_val_bin_ctr = [ val * scale_f / integral_y_val_bin_ctr for val in y_val_bin_ctr ] + + # Save scaled content into the histograms + for ob in range(0, len(obs_bins)-1): + th_hists[sv].SetBinContent(p+1, ob+1, y_val_bin_ctr[ob]) + + f.close() + + # ------------------------------------------------------------------------------------------------------------ + new_obs_lab = ("zcut%s_B%s" % (str(zcut), str(beta))).replace('.','') + + # ------------------------------------------------------------------------------------------------------------ + for n_pt in range(0, len(self.final_pt_bins)-1): + histo_list = [] + for sv in range(0, n_scale_variations): + projection_name = 'h1_input_%s_R%s_%s_sv%i_pT_%i_%i' % ( self.observable,(str)(jetR).replace('.',''),obs_setting,sv,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1])) + + # Determine the bin number that corresponds to the pT edges given + min_bin, max_bin = self.bin_position( self.theory_pt_bins, self.final_pt_bins[n_pt], self.final_pt_bins[n_pt+1] ) + + h1_input_hist = th_hists[sv].ProjectionY(projection_name, min_bin, max_bin) + h1_input_hist.SetTitle(projection_name) + h1_input_hist.SetDirectory(0) + + # Undo the bin width scaling and set correct normalization + norm_factor = h1_input_hist.Integral() + if norm_factor == 0: norm_factor = 1 + h1_input_hist.Scale(1./norm_factor, "width") + + for b in range(0, h1_input_hist.GetNbinsX()): + h1_input_hist.SetBinError(b+1, 0) + + histo_list.append(h1_input_hist) + + # Create envelope histograms + hist_min, hist_max = self.min_max( histo_list ) + + # Rename some objects + name_h_cent = 'h1_input_%s_R%s_%s_pT_%i_%i' % ( self.observable,(str)(jetR).replace('.',''),new_obs_lab,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1])) + name_h_min = 'h1_min_input_%s_R%s_%s_pT_%i_%i' % ( self.observable,(str)(jetR).replace('.',''),new_obs_lab,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1])) + name_h_max = 'h1_max_input_%s_R%s_%s_pT_%i_%i' 
% ( self.observable,(str)(jetR).replace('.',''),new_obs_lab,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1])) + + h_central = histo_list[0] + h_central.SetName(name_h_cent) + hist_min .SetName(name_h_min ) + hist_max .SetName(name_h_max ) + + # Create a graph out of these histograms + graph_cent = self.histo_to_graph(h_central,hist_min,hist_max) + graph_min = ROOT.TGraph(hist_min) + graph_max = ROOT.TGraph(hist_max) + graph_frac = self.fractional_error(h_central,hist_min,hist_max) + + graph_cent.SetName('g_input_%s_R%s_%s_pT_%i_%i' % ( self.observable,(str)(jetR).replace('.',''),new_obs_lab,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1]))) + graph_min .SetName('g_min_input_%s_R%s_%s_pT_%i_%i' % ( self.observable,(str)(jetR).replace('.',''),new_obs_lab,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1]))) + graph_max .SetName('g_max_input_%s_R%s_%s_pT_%i_%i' % ( self.observable,(str)(jetR).replace('.',''),new_obs_lab,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1]))) + graph_frac.SetName('g_frac_input_%s_R%s_%s_pT_%i_%i'% ( self.observable,(str)(jetR).replace('.',''),new_obs_lab,(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1]))) + + xtit = self.obs_label + ytit = '#frac{1}{#sigma} #frac{d#sigma}{d'+xtit+'}' + tit = 'input (hadron-level, no MPI) %i < #it{p}_{T}^{jet} < %i GeV/#it{c}'%((int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1])) + + self.pretty_1D_object(graph_cent,2,2,1,tit, xtit, ytit, True) + self.pretty_1D_object(graph_min ,1,1,2,tit, xtit, ytit) + self.pretty_1D_object(graph_max ,1,1,2,tit, xtit, ytit) + self.pretty_1D_object(graph_frac,2,2,1,tit, xtit, ytit, True) + + outpdfname = os.path.join(self.output_dir, 'control_plots' , 'processed_plots' ) + if not os.path.exists(outpdfname): + os.makedirs(outpdfname) + outpdfname_1 = os.path.join(outpdfname, 
'theory_%s_pT_%i_%i_GeVc_input.pdf'%(self.create_label(jetR,obs_setting,grooming_setting),(int)(self.final_pt_bins[n_pt]),(int)(self.final_pt_bins[n_pt+1])) ) + self.plot_processed_functions( graph_cent, graph_min, graph_max, outpdfname_1) + + # loop over response files (e.g. Pythia, Herwig, ...) + for ri, response in enumerate(self.theory_response_files): + for lev in self.response_levels: + outpdfname_2 = os.path.join(outpdfname, 'comp_gen_input_theory_%s_pT_%i_%i_GeVc_' % \ + (self.create_label(jetR, obs_setting, grooming_setting), int(self.final_pt_bins[n_pt]), int(self.final_pt_bins[n_pt+1])) ) + outpdfname_2 += lev[0]+"_"+lev[1]+"_MPI"+lev[2]+"_"+self.theory_response_labels[ri]+".pdf" + self.plot_comparison_SCET_gen_input(graph_cent, jetR, obs_setting, grooming_setting, lev[0], lev[1], lev[2], \ + self.theory_response_labels[ri], self.final_pt_bins[n_pt], self.final_pt_bins[n_pt+1], outpdfname_2) + + self.outfile.cd() + h_central .Write() + hist_min .Write() + hist_max .Write() + graph_cent.Write() + graph_min .Write() + graph_max .Write() + graph_frac.Write() + + # ----------------------------------------------------- + # Setting the filled histograms as attributes + self.outfile.cd() + for sv in range(0,n_scale_variations): + setattr(self,hist_names[sv],th_hists[sv]) + + # Only save the 2D histograms for the central scale case + #th_hists_no_scaling[0].Write() + #th_hists[0].Write() + + outpdfname = os.path.join(self.output_dir, 'control_plots', 'input') + if not os.path.exists(outpdfname): + os.makedirs(outpdfname) + outpdfname = os.path.join(outpdfname, 'theory_input_%s.pdf' % label) + self.plot_input_theory(th_hists_no_scaling, th_hists, outpdfname) + + scale_var.append(n_scale_variations) + self.theory_scale_vars[jetR] = scale_var + + #---------------------------------------------------------------------- + # Plot input theory curves both as histograms and curves + #---------------------------------------------------------------------- + def 
plot_input_theory(self, h_list_no_scaling, h_list, outpdfname): + + for i in range(0,len(h_list_no_scaling)): + + c1 = ROOT.TCanvas('c1','c1',1000,800) + c1.Divide(2,2) + + for j in range(0,4): + c1.cd(j+1).SetLogz() + c1.cd(j+1).SetLeftMargin(0.20) + if j > 1: + c1.cd(j+1).SetTheta(50) + c1.cd(j+1).SetPhi(220) + else: + c1.cd(j+1).SetBottomMargin(0.20) + c1.cd(j+1).SetRightMargin(0.24) + + self.pretty_TH2D(h_list_no_scaling[i],'input theory curves, scale variation %i'%(i),'#it{p}_{T}^{jet} [GeV/#it{c}]',self.obs_label,'#frac{1}{#sigma} #frac{d#sigma}{d'+self.obs_label+'}') + self.pretty_TH2D(h_list [i],'scaled input, scale variation %i'%(i) ,'#it{p}_{T}^{jet} [GeV/#it{c}]',self.obs_label,'~ #frac{d#sigma}{d'+self.obs_label+'}') + + h_list_no_scaling[i].GetXaxis().SetTitleOffset(1.6) + h_list [i].GetXaxis().SetTitleOffset(1.6) + h_list_no_scaling[i].GetYaxis().SetTitleOffset(1.5) + h_list [i].GetYaxis().SetTitleOffset(1.5) + h_list_no_scaling[i].GetZaxis().SetTitleOffset(1.4) + h_list [i].GetZaxis().SetTitleOffset(1.4) + + c1.cd(1) + h_list_no_scaling[i].Draw('COLZ') + + c1.cd(2) + h_list[i].Draw('COLZ') + + c1.cd(3) + h_list_no_scaling[i].Draw('LEGO1') + + c1.cd(4) + h_list[i].Draw('LEGO1') + + c1.Draw() + + if len(h_list_no_scaling)==1: + c1.Print(outpdfname) + else: + if i == 0: c1.Print(outpdfname+'(') + elif i == len(h_list_no_scaling)-1: c1.Print(outpdfname+')') + else: c1.Print(outpdfname) + + del c1 + +#---------------------------------------------------------------------- +if __name__ == '__main__': + + # Define arguments + parser = argparse.ArgumentParser(description='Folding theory predictions') + parser.add_argument('-c', '--configFile', action='store', + type=str, metavar='configFile', + default='analysis_config.yaml', + help='Path of config file for analysis') + + # Parse the arguments + args = parser.parse_args() + + print('Configuring...') + print('configFile: \'{0}\''.format(args.configFile)) + + # If invalid configFile is given, exit + if not 
os.path.exists(args.configFile): + print('File \"{0}\" does not exist! Exiting!'.format(args.configFile)) + sys.exit(0) + + analysis = TheoryFolding(config_file = args.configFile) + analysis.run_theory_folding() diff --git a/pyjetty/alice_analysis/analysis/user/substructure/analysis_utils_obs.py b/pyjetty/alice_analysis/analysis/user/substructure/analysis_utils_obs.py index 5387ad47e..e26b1d906 100755 --- a/pyjetty/alice_analysis/analysis/user/substructure/analysis_utils_obs.py +++ b/pyjetty/alice_analysis/analysis/user/substructure/analysis_utils_obs.py @@ -44,7 +44,9 @@ def formatted_subobs_label(self, observable): elif observable == 'jet_axis': return '#Delta #it{R}_{axis}' elif observable == 'ang': - return '#it{#beta}' + return '#it{#alpha}' + elif observable == 'mass': + return '#it{m}_{jet}' # Else observable not implemented return None @@ -146,16 +148,17 @@ def name_thn(self, observable, jetR, obs_label, R_max = None, prong_matching_res name = 'hResponse_JetPt_{}_R{}_{}_Rmax{}Scaled'.format(observable, jetR, obs_label, R_max) else: name = 'hResponse_JetPt_{}_R{}_{}Scaled'.format(observable, jetR, obs_label) - - return name + + return name.replace("__", '_').replace('_Scaled', 'Scaled') #--------------------------------------------------------------- # Get name of response THn, rebinned #--------------------------------------------------------------- def name_thn_rebinned(self, observable, jetR, obs_label): - return 'hResponse_JetPt_{}_R{}_{}_rebinned'.format(observable, jetR, obs_label) - + return 'hResponse_JetPt_{}_R{}_{}_rebinned'.format( + observable, jetR, obs_label).replace("__", '_') + #--------------------------------------------------------------- # Get name of 2D data histogram #--------------------------------------------------------------- @@ -163,18 +166,26 @@ def name_data(self, observable, jetR, obs_label, R_max = None, thermal_model = F if R_max: if thermal_model: - return 'h_{}_JetPt_R{}_{}_Rmax{}Scaled'.format(observable, jetR, obs_label, 
R_max) + return 'h_{}_JetPt_R{}_{}_Rmax{}Scaled'.format( + observable, jetR, obs_label, R_max).replace("__", '_').replace('_Scaled', 'Scaled') else: - return 'h_{}_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, obs_label, R_max) + return 'h_{}_JetPt_R{}_{}_Rmax{}'.format( + observable, jetR, obs_label, R_max).replace("__", '_') else: - return 'h_{}_JetPt_R{}_{}'.format(observable, jetR, obs_label) - + if obs_label: + return 'h_{}_JetPt_R{}_{}'.format( + observable, jetR, obs_label).replace("__", '_') + else: + return 'h_{}_JetPt_R{}'.format( + observable, jetR).replace("__", '_') + #--------------------------------------------------------------- # Get name of 2D data histogram, rebinned #--------------------------------------------------------------- def name_data_rebinned(self, observable, jetR, obs_label): - return 'h_{}_JetPt_R{}_{}_rebinned'.format(observable, jetR, obs_label) + return 'h_{}_JetPt_R{}_{}_rebinned'.format( + observable, jetR, obs_label).replace("__", '_') #--------------------------------------------------------------- # Get custom regularization parameter diff --git a/pyjetty/alice_analysis/analysis/user/substructure/roounfold_obs.py b/pyjetty/alice_analysis/analysis/user/substructure/roounfold_obs.py index 13ee9e5e7..bb51fc543 100644 --- a/pyjetty/alice_analysis/analysis/user/substructure/roounfold_obs.py +++ b/pyjetty/alice_analysis/analysis/user/substructure/roounfold_obs.py @@ -51,7 +51,7 @@ def __init__(self, observable='', input_file_data='', input_file_response='', co self.suffix = suffix self.initialize_config() - + self.histutils = ROOT.RUtil.HistUtils() self.get_responses(rebin_response) @@ -71,13 +71,13 @@ def __init__(self, observable='', input_file_data='', input_file_response='', co # Create output directories for unfolding plots self.create_output_dirs() - + self.ColorArray = [ROOT.kBlue-4, ROOT.kAzure+7, ROOT.kCyan-2, ROOT.kViolet-8, ROOT.kBlue-6, ROOT.kGreen+3, ROOT.kPink-4, ROOT.kRed-4, ROOT.kOrange-3] self.MarkerArray = [20, 
21, 22, 23, 33, 34, 24, 25, 26, 32] - print(self) + #print(self) #--------------------------------------------------------------- # Main processing function @@ -125,6 +125,11 @@ def initialize_config(self): self.max_reg_param = self.obs_config_dict['common_settings']['max_reg_param'] self.use_max_reg_param = True + # Get shape variation parameter for closure test + self.prior_variation_option = config['prior_variation_option'] + self.shape_variation_parameter1 = config['prior1_variation_parameter'] + self.shape_variation_parameter2 = config['prior2_variation_parameter'] + # Retrieve histogram binnings for each observable setting for i, _ in enumerate(self.obs_subconfig_list): @@ -156,10 +161,7 @@ def initialize_config(self): n_obs_bins_truth = len(obs_bins_truth) - 1 det_obs_bin_array = array('d',obs_bins_det) truth_obs_bin_array = array('d',obs_bins_truth) - - self.xmin = self.obs_config_dict[config_name]['obs_bins_truth'][0] - self.xmax = self.obs_config_dict[config_name]['obs_bins_truth'][-1] - + # For SD, fill underflow bin to include untagged fraction in the unfolding # If underflow is activated, create a new underflow bin for the observable if grooming_setting: @@ -168,7 +170,7 @@ def initialize_config(self): n_obs_bins_det += 1 truth_obs_bin_array.insert(0, truth_obs_bin_array[0] - 0.1) n_obs_bins_truth += 1 - + setattr(self, 'n_bins_det_{}'.format(obs_label), n_obs_bins_det) setattr(self, 'n_bins_truth_{}'.format(obs_label), n_obs_bins_truth) setattr(self, 'det_bin_array_{}'.format(obs_label), det_obs_bin_array) @@ -178,23 +180,47 @@ def initialize_config(self): name_thn = self.utils.name_thn(self.observable, jetR, obs_label, self.R_max, self.prong_matching_response) name_thn_rebinned = self.utils.name_thn_rebinned(self.observable, jetR, obs_label) - name_data = self.utils.name_data(self.observable, jetR, obs_label, self.R_max, self.thermal_model) + name_thn_rebinned_shape1 = name_thn_rebinned + "_shape" + \ + 
self.utils.remove_periods(self.shape_variation_parameter1) + name_thn_rebinned_shape2 = name_thn_rebinned + "_shape" + \ + self.utils.remove_periods(self.shape_variation_parameter2) + + name_data = self.utils.name_data(self.observable, jetR, obs_label, self.R_max) name_data_rebinned = self.utils.name_data_rebinned(self.observable, jetR, obs_label) name_roounfold = 'roounfold_response_R{}_{}'.format(jetR, obs_label) + name_roounfold_shape1 = 'roounfold_response_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter1), jetR, obs_label) + name_roounfold_shape2 = 'roounfold_response_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter2), jetR, obs_label) + setattr(self, 'name_thn_R{}_{}'.format(jetR, obs_label), name_thn) setattr(self, 'name_thn_rebinned_R{}_{}'.format(jetR, obs_label), name_thn_rebinned) + setattr(self, 'name_thn_rebinned_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter1), jetR, obs_label), + name_thn_rebinned_shape1) + setattr(self, 'name_thn_rebinned_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter2), jetR, obs_label), + name_thn_rebinned_shape2) setattr(self, 'name_data_R{}_{}'.format(jetR, obs_label), name_data) setattr(self, 'name_data_rebinned_R{}_{}'.format(jetR, obs_label), name_data_rebinned) setattr(self, 'name_roounfold_R{}_{}'.format(jetR, obs_label), name_roounfold) - - self.reg_param_name = 'n_iter' + setattr(self, 'name_roounfold_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter1), jetR, obs_label), + name_roounfold_shape1) + setattr(self, 'name_roounfold_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter2), jetR, obs_label), + name_roounfold_shape2) + + # In thermal case, we will use the MC instead of real data for the unfolding. + # Still, we need the real data above to use the correct uncertainties for smearing. 
+ # Here, also load the "data" (MC) spectrum we will use for the actual unfolding. + if self.thermal_model: + name_data_thermal = self.utils.name_data(self.observable, jetR, obs_label, self.R_max, self.thermal_model) + setattr(self, 'name_data_thermal_R{}_{}'.format(jetR, obs_label), name_data_thermal) + + self.reg_param_name = '#it{n}_{iter}' self.errorType = ROOT.RooUnfold.kCovToy - - # Get shape variation parameter for closure test - self.prior_variation_option = config['prior_variation_option'] - self.shape_variation_parameter1 = config['prior1_variation_parameter'] - self.shape_variation_parameter2 = config['prior2_variation_parameter'] #--------------------------------------------------------------- # Get responses, either from file or manually rebin @@ -206,6 +232,8 @@ def get_responses(self, rebin_response=False): f = ROOT.TFile(response_file_name, 'RECREATE') f.Close() + use_histutils = True + # Rebin response matrix, and create RooUnfoldResponse object # THn response matrix is: (pt-det, pt-true, obs-det, obs-true) for jetR in self.jetR_list: @@ -217,8 +245,19 @@ def get_responses(self, rebin_response=False): name_thn = getattr(self, 'name_thn_R{}_{}'.format(jetR, obs_label)) name_thn_rebinned = getattr(self, 'name_thn_rebinned_R{}_{}'.format(jetR, obs_label)) + name_thn_rebinned_shape1 = getattr(self, 'name_thn_rebinned_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter1), jetR, obs_label)) + name_thn_rebinned_shape2 = getattr(self, 'name_thn_rebinned_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter2), jetR, obs_label)) name_data = getattr(self, 'name_data_R{}_{}'.format(jetR, obs_label)) + name_data_rebinned = getattr(self, 'name_data_rebinned_R{}_{}'.format(jetR, obs_label)) name_roounfold = getattr(self, 'name_roounfold_R{}_{}'.format(jetR, obs_label)) + name_roounfold_shape1 = getattr(self, 'name_roounfold_shape{}_R{}_{}'.format( + 
self.utils.remove_periods(self.shape_variation_parameter1), jetR, obs_label)) + name_roounfold_shape2 = getattr(self, 'name_roounfold_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter2), jetR, obs_label)) + if self.thermal_model: + name_data_thermal = getattr(self, 'name_data_thermal_R{}_{}'.format(jetR, obs_label)) # Retrieve desired binnings n_pt_bins_det = getattr(self, 'n_pt_bins_det_{}'.format(obs_label)) @@ -230,7 +269,7 @@ def get_responses(self, rebin_response=False): det_bin_array = getattr(self, 'det_bin_array_{}'.format(obs_label)) n_bins_truth = getattr(self, 'n_bins_truth_{}'.format(obs_label)) truth_bin_array = getattr(self, 'truth_bin_array_{}'.format(obs_label)) - + # For SD, fill underflow bin to include untagged fraction in the unfolding # If underflow is activated, create a new underflow bin for the observable if grooming_setting: @@ -240,28 +279,102 @@ def get_responses(self, rebin_response=False): if self.prong_matching_response: move_underflow = False + # Get data histogram + hData = self.fData.Get(name_data) + + # Re-bin the data histogram + if use_histutils: + h = self.histutils.rebin_th2(hData, name_data, det_pt_bin_array, n_pt_bins_det, + det_bin_array, n_bins_det, move_underflow) + else: + h = self.utils.rebin_data(hData, name_data, n_pt_bins_det, det_pt_bin_array, + n_bins_det, det_bin_array, move_underflow=move_underflow) + + # If thermal model, smear MC input spectrum by measured data + # Then update data to be the correct spectrum + if self.thermal_model: + hDataThermal = self.fResponse.Get(name_data_thermal) + + # Re-bin the thermal "data" histogram + if use_histutils: + h_th = self.histutils.rebin_th2(hDataThermal, name_data_thermal, det_pt_bin_array, + n_pt_bins_det, det_bin_array, n_bins_det, move_underflow) + else: + h_th = self.utils.rebin_data(hDataThermal, name_data_thermal, n_pt_bins_det, det_pt_bin_array, + n_bins_det, det_bin_array, move_underflow=move_underflow) + + measuredErrors = 
self.getMeasuredErrors(h) + self.smearSpectrum(h_th, measuredErrors) + h = h_th + + h.SetDirectory(0) + name = getattr(self, 'name_data_rebinned_R{}_{}'.format(jetR, obs_label)) + setattr(self, name, h) + # Rebin if requested, and write to file - use_histutils = True - thn = self.fResponse.Get(name_thn) - thn.SetName(name_thn) + try: + thn = self.fResponse.Get(name_thn) + thn.SetName(name_thn) + except AttributeError: + # Give a more helpful error message for debugging + raise AttributeError("%s not found in file %s" % (name_thn, self.input_file_response)) setattr(self, name_thn, thn) + if rebin_response: - - # Create rebinned THn and RooUnfoldResponse with these binnings, and write to file label = 'R{}_{}'.format(jetR, obs_label) + n_dim = 4 + + prior_variation_option = self.prior_variation_option + # Option 6 is to scale by data / MC-det + if int(self.prior_variation_option) == 6: + if self.prior_variation_parameter > 1e-8: # Don't scale in control case + hData_PerBin = getattr(self, name_data_rebinned) + + # Perform rebin of RM with no variation for taking ratio + thn_temp = self.histutils.rebin_thn( + response_file_name, thn, name_thn_rebinned, name_roounfold, n_dim, + n_pt_bins_det, det_pt_bin_array, n_bins_det, det_bin_array, + n_pt_bins_truth, truth_pt_bin_array, n_bins_truth, truth_bin_array, + label, 0., 1, move_underflow, self.use_miss_fake) + hMC_PerBin = thn_temp.Projection(2, 0) + hMC_PerBin.SetName("hMC_PerBin_{}".format(label)) + + prior_variation_option = hData_PerBin.Clone("hShapeVar_{}".format(label)) + prior_variation_option.Divide(hMC_PerBin) + + outputFile = ROOT.TFile("./test_output.root", "RECREATE"); + prior_variation_option.Write(); + + # Create rebinned THn and RooUnfoldResponse with these binnings, and write to file if use_histutils: - n_dim = 4 # If use_histutils, we use the Miss/Fake functionality of RooUnfold to # perform kinematic efficiency corrections - self.histutils.rebin_thn(response_file_name, thn, - name_thn_rebinned, 
name_roounfold, n_dim, - n_pt_bins_det, det_pt_bin_array, - n_bins_det, det_bin_array, - n_pt_bins_truth, truth_pt_bin_array, - n_bins_truth, truth_bin_array, - label, self.prior_variation_parameter, - self.prior_variation_option, move_underflow, - self.use_miss_fake) + rebinner = self.histutils.rebin_thn + if int(self.prior_variation_option) == 6 and self.prior_variation_parameter > 1e-8: + rebinner = self.histutils.rebin_thn_th2prior + + rebinner( + response_file_name, thn, name_thn_rebinned, name_roounfold, n_dim, + n_pt_bins_det, det_pt_bin_array, n_bins_det, det_bin_array, + n_pt_bins_truth, truth_pt_bin_array, n_bins_truth, truth_bin_array, + label, self.prior_variation_parameter, prior_variation_option, + move_underflow, self.use_miss_fake) + + # Also rebin response with shape scaling for shape closure tests + rebinner( + response_file_name, thn, name_thn_rebinned_shape1, name_roounfold_shape1, + n_dim, n_pt_bins_det, det_pt_bin_array, n_bins_det, det_bin_array, + n_pt_bins_truth, truth_pt_bin_array, n_bins_truth, truth_bin_array, + label, self.shape_variation_parameter1, prior_variation_option, + move_underflow, self.use_miss_fake) + + rebinner( + response_file_name, thn, name_thn_rebinned_shape2, name_roounfold_shape2, + n_dim, n_pt_bins_det, det_pt_bin_array, n_bins_det, det_bin_array, + n_pt_bins_truth, truth_pt_bin_array, n_bins_truth, truth_bin_array, + label, self.shape_variation_parameter2, prior_variation_option, + move_underflow, self.use_miss_fake) + else: # If not use_histutils, we apply a kinematic efficiency correction by hand # after unfolding. 
@@ -272,35 +385,26 @@ def get_responses(self, rebin_response=False): self.prior_variation_parameter, move_underflow=move_underflow, use_miss_fake=self.use_miss_fake) - # Get data histogram - hData = self.fData.Get(name_data) - - # If thermal model, smear input spectrum - if self.thermal_model: - measuredErrors = self.getMeasuredErrors(hData) - self.smearSpectrum(hData, measuredErrors) - - # Re-bin the data histogram - if use_histutils: - h = self.histutils.rebin_th2(hData, name_data, det_pt_bin_array, n_pt_bins_det, - det_bin_array, n_bins_det, move_underflow) - else: - h = self.utils.rebin_data(hData, name_data, n_pt_bins_det, det_pt_bin_array, - n_bins_det, det_bin_array, move_underflow=move_underflow) - - h.SetDirectory(0) - name = getattr(self, 'name_data_rebinned_R{}_{}'.format(jetR, obs_label)) - setattr(self, name, h) - # Retrieve responses from file f = ROOT.TFile(response_file_name, 'READ') thn_rebinned = f.Get(name_thn_rebinned) #thn_rebinned.SetDirectory(0) + thn_rebinned_shape1 = f.Get(name_thn_rebinned_shape1) + thn_rebinned_shape2 = f.Get(name_thn_rebinned_shape2) + roounfold_response = f.Get(name_roounfold) roounfold_response.UseOverflow(False) + roounfold_response_shape1 = f.Get(name_roounfold_shape1) + roounfold_response_shape1.UseOverflow(False) + roounfold_response_shape2 = f.Get(name_roounfold_shape2) + roounfold_response_shape2.UseOverflow(False) setattr(self, name_thn_rebinned, thn_rebinned) + setattr(self, name_thn_rebinned_shape1, thn_rebinned_shape1) + setattr(self, name_thn_rebinned_shape2, thn_rebinned_shape2) setattr(self, name_roounfold, roounfold_response) + setattr(self, name_roounfold_shape1, roounfold_response_shape1) + setattr(self, name_roounfold_shape2, roounfold_response_shape2) f.Close() #--------------------------------------------------------------- @@ -310,14 +414,20 @@ def create_output_dirs(self): dirs = ['RM', 'Data', 'KinematicEfficiency', 'Unfolded_obs', 'Unfolded_pt', 'Unfolded_ratio', 'Unfolded_stat_uncert', 
'Test_StatisticalClosure', - 'Test_ShapeClosure{}'.format(self.utils.remove_periods(self.shape_variation_parameter1)), - 'Test_ShapeClosure{}'.format(self.utils.remove_periods(self.shape_variation_parameter2)), + 'Test_ShapeClosure{}'.format( + self.utils.remove_periods(self.shape_variation_parameter1)), + 'Test_ShapeClosure{}'.format( + self.utils.remove_periods(self.shape_variation_parameter2)), + 'Test_PriorClosure{}'.format( + self.utils.remove_periods(self.shape_variation_parameter1)), + 'Test_PriorClosure{}'.format( + self.utils.remove_periods(self.shape_variation_parameter2)), 'Test_Refolding', 'Correlation_Coefficients'] if not self.use_max_reg_param: dirs.append('Unfolded_ratio_to_final') if self.thermal_model: dirs.append('Test_ThermalClosure') - + for i in dirs: output_dir = os.path.join(self.output_dir, i) setattr(self, 'output_dir_{}'.format(i), output_dir) @@ -334,7 +444,7 @@ def unfold_single_setting(self, jetR, obs_label, obs_setting, grooming_setting): # Plot data 2D histogram name = getattr(self, 'name_data_rebinned_R{}_{}'.format(jetR, obs_label)) hData_PerBin = getattr(self, name) - hData_PerBin.GetXaxis().SetTitle('#it{p}_{T,jet}') + hData_PerBin.GetXaxis().SetTitle('#it{p}_{T}^{ch jet} (GeV/#it{c})') hData_PerBin.GetYaxis().SetTitle(self.xtitle) output_dir = getattr(self, 'output_dir_Data') outf_name = 'hData_R{}_{}{}'.format(self.utils.remove_periods(jetR), @@ -344,19 +454,29 @@ def unfold_single_setting(self, jetR, obs_label, obs_setting, grooming_setting): # Plot various slices of the response matrix (from the THn) self.plot_RM_slices(jetR, obs_label, grooming_setting) - + # Plot the kinematic efficiency from the response THn, and save it as an attribute self.plot_kinematic_efficiency(jetR, obs_label, obs_setting, grooming_setting) # Get MC-det and MC-truth 2D projections for unfolding closure test name = 'hMC_Det_R{}_{}'.format(jetR, obs_label) - hMC_Det = self.get_MCdet2D(jetR, obs_label) + hMC_Det = self.get_MCdet2D(jetR, obs_label, 
name) setattr(self, name, hMC_Det) name = 'hMC_Truth_R{}_{}'.format(jetR, obs_label) - hMC_Truth = self.get_MCtruth2D(jetR, obs_label) + hMC_Truth = self.get_MCtruth2D(jetR, obs_label, name) setattr(self, name, hMC_Truth) + # Also get smeared MC det/tru for closure tests + for s in [self.shape_variation_parameter1, self.shape_variation_parameter2]: + name = 'hMC_Det_R{}_{}_shape{}'.format(jetR, obs_label, str(s).replace('.', '')) + hMC_Det = self.get_MCdet2D(jetR, obs_label, name, s) + setattr(self, name, hMC_Det) + + name = 'hMC_Truth_R{}_{}_shape{}'.format(jetR, obs_label, str(s).replace('.', '')) + hMC_Truth = self.get_MCtruth2D(jetR, obs_label, name, s) + setattr(self, name, hMC_Truth) + # Unfold spectrum if hData_PerBin and hMC_Det and hMC_Truth: @@ -440,16 +560,16 @@ def plot_unfolded_observable(self, jetR, obs_label, obs_setting, grooming_settin # Plot unfolded distribution for each successive iteration self.plot_observable(jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth) - + # Plot ratio of unfolded result for each successive iteration self.plot_observable(jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, option = 'ratio') - + # Plot ratio of each iteration to final iteration (only if manual reg param) if not self.use_max_reg_param: self.plot_observable(jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, option = 'ratio_to_final', reg_param_final=reg_param_final) - + # Plot statistical uncertainties for each iteration self.plot_observable(jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, option = 'stat_uncert') @@ -473,14 +593,15 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, myPad.SetTopMargin(0.07) myPad.SetRightMargin(0.04) myPad.SetBottomMargin(0.13) + myPad.SetTicks(2, 2) myPad.Draw() myPad.cd() n_bins_truth = getattr(self, 'n_bins_truth_{}'.format(obs_label)) truth_bin_array = getattr(self, 
'truth_bin_array_{}'.format(obs_label)) - leg = ROOT.TLegend(0.75,0.5,0.88,0.92) - self.utils.setup_legend(leg,0.04) + leg = ROOT.TLegend(0.75, 0.5, 0.88, 0.92) + self.utils.setup_legend(leg, 0.04) # Select final regularization parameter if self.use_max_reg_param: @@ -497,22 +618,22 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, if option == 'ratio': if i > 1: - h_previous = self.get_unfolded_result(jetR, obs_label, i-1, min_pt_truth, - max_pt_truth, option) + h_previous = self.get_unfolded_result( + jetR, obs_label, i-1, min_pt_truth, max_pt_truth, option) h.Divide(h_previous) else: continue - + elif option == 'ratio_to_final': print(reg_param_final) - h_final = self.get_unfolded_result(jetR, obs_label, reg_param_final, min_pt_truth, - max_pt_truth, option) + h_final = self.get_unfolded_result( + jetR, obs_label, reg_param_final, min_pt_truth, max_pt_truth, option) h.Divide(h_final) elif option == 'stat_uncert': - h = self.get_unfolded_result_uncertainties(jetR, obs_label, i, min_pt_truth, - max_pt_truth, option) - + h = self.get_unfolded_result_uncertainties( + jetR, obs_label, i, min_pt_truth, max_pt_truth, option) + elif option == '' and i == reg_param_final + 2: # Get input distribution name = getattr(self, 'name_data_rebinned_R{}_{}'.format(jetR, obs_label)) @@ -599,7 +720,7 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, label = '{} = {}'.format(self.reg_param_name, i) leg.AddEntry(h, label, 'Pe') - + # Plot input spectrum if h_data and i == reg_param_final + 2: h_data.SetLineColor(1) @@ -607,7 +728,7 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, h_data.SetLineWidth(4) h_data.Draw('L hist same') leg.AddEntry(h_data, 'input data', 'L') - + leg.Draw() # Draw horizontal line at y = 1 @@ -620,7 +741,8 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, text_latex = ROOT.TLatex() text_latex.SetNDC() - text = str(min_pt_truth) + ' < #it{p}_{T, ch jet} 
< ' + str(max_pt_truth) + text = str(min_pt_truth) + ' < #it{p}_{T}^{ch jet} < ' + \ + str(max_pt_truth) + ' GeV/#it{c}' text_latex.DrawLatex(0.35, 0.85, text) text_latex = ROOT.TLatex() @@ -642,10 +764,9 @@ def plot_observable(self, jetR, obs_label, obs_setting, grooming_setting, else: output_dir = getattr(self, 'output_dir_Unfolded_obs') - outf_name = 'hUnfolded{}_{}_R{}_{}_{}-{}{}'.format(option, self.observable, - self.utils.remove_periods(jetR), - obs_label, int(min_pt_truth), - int(max_pt_truth), self.file_format) + outf_name = 'hUnfolded{}_{}_R{}_{}_{}-{}{}'.format( + option, self.observable, self.utils.remove_periods(jetR), obs_label, + int(min_pt_truth), int(max_pt_truth), self.file_format) outf_name = os.path.join(output_dir, outf_name) c.SaveAs(outf_name) c.Close() @@ -710,6 +831,7 @@ def plot_unfolded_pt(self, jetR, obs_label, obs_setting, grooming_setting): myPad.SetTopMargin(0.07) myPad.SetRightMargin(0.04) myPad.SetBottomMargin(0.13) + myPad.SetTicks(2, 2) myPad.Draw() myPad.cd() myPad.SetLogy() @@ -719,9 +841,9 @@ def plot_unfolded_pt(self, jetR, obs_label, obs_setting, grooming_setting): myBlankHisto = ROOT.TH1F('myBlankHisto','Blank Histogram', n_pt_bins_truth, truth_pt_bin_array) myBlankHisto.SetNdivisions(505) - myBlankHisto.SetXTitle('#it{p}_{T, ch jet}') + myBlankHisto.SetXTitle('#it{p}_{T}^{ch jet} (GeV/#it{c})') myBlankHisto.GetYaxis().SetTitleOffset(2.2) - myBlankHisto.SetYTitle('#frac{dN}{d#it{p}_{T, ch jet}}') + myBlankHisto.SetYTitle('#frac{dN}{d#it{p}_{T}^{ch jet}}') myBlankHisto.SetMaximum(5000) myBlankHisto.SetMinimum(1) myBlankHisto.Draw("E") @@ -757,7 +879,7 @@ def plot_unfolded_pt(self, jetR, obs_label, obs_setting, grooming_setting): h.SetMarkerSize(1.5) h.SetLineStyle(1) h.SetLineWidth(2) - + myBlankHisto.SetMaximum(10*h.GetMaximum()) myBlankHisto.SetMinimum(0.1*h.GetMinimum()) @@ -851,11 +973,12 @@ def plot_kinematic_efficiency_projections(self, hKinematicEfficiency2D, jetR, c.Draw() c.cd() - myPad = ROOT.TPad('myPad', 'The 
pad',0,0,1,1) + myPad = ROOT.TPad('myPad', 'The pad', 0, 0, 1, 1) myPad.SetLeftMargin(0.2) myPad.SetTopMargin(0.07) myPad.SetRightMargin(0.04) myPad.SetBottomMargin(0.13) + myPad.SetTicks(2, 2) myPad.Draw() myPad.cd() @@ -880,8 +1003,8 @@ def plot_kinematic_efficiency_projections(self, hKinematicEfficiency2D, jetR, hKinematicEfficiency2D.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) h = hKinematicEfficiency2D.ProjectionY() - name = 'hKinematicEfficiency_R{}_{}_{}-{}'.format(jetR, obs_label, - min_pt_truth, max_pt_truth) + name = 'hKinematicEfficiency_R{}_{}_{}-{}'.format( + jetR, obs_label, min_pt_truth, max_pt_truth) h.SetName(name) h.SetMarkerSize(1.5) h.SetMarkerStyle(self.MarkerArray[i]) @@ -895,7 +1018,8 @@ def plot_kinematic_efficiency_projections(self, hKinematicEfficiency2D, jetR, h.DrawCopy('P X0 same') - label = str(min_pt_truth) + ' < #it{p}_{T, ch jet} < ' + str(max_pt_truth) + ' GeV/#it{c}' + label = str(min_pt_truth) + ' < #it{p}_{T}^{ch jet} < ' + \ + str(max_pt_truth) + ' GeV/#it{c}' leg.AddEntry(h, label, 'Pe') leg.Draw() @@ -944,7 +1068,7 @@ def plot_RM_slices(self, jetR, obs_label, grooming_setting): max_pt_truth = truth_pt_bin_array[bin+1] self.plot_obs_response(jetR, obs_label, min_pt_truth, max_pt_truth, hResponse, grooming_setting) - + # Plot pt-response (summed over substructure observable) self.plot_pt_response(jetR, obs_label, hResponse) @@ -968,12 +1092,13 @@ def plot_obs_response(self, jetR, obs_label, min_pt_truth, max_pt_truth, hRespon int(max_pt_truth))) hResponse_Obs_Normalized = self.utils.normalize_response_matrix(hResponse_Obs) - + # Set z-maximum in Soft Drop case, since otherwise the untagged bin will dominate the scale if grooming_setting and 'sd' in grooming_setting: hResponse_Obs_Normalized.SetMaximum(0.3) - text = str(min_pt_truth) + ' < #it{p}_{T, ch jet}^{true} < ' + str(max_pt_truth) + text = str(min_pt_truth) + ' < #it{p}_{T, true}^{ch jet} < ' + \ + str(max_pt_truth) + ' GeV/#it{c}' output_dir = getattr(self, 
'output_dir_RM') outf_name = '{}{}'.format(hResponse_Obs.GetName(), self.file_format) @@ -990,7 +1115,7 @@ def plot_pt_response(self, jetR, obs_label, hResponse): truth_pt_bin_array = getattr(self, 'truth_pt_bin_array_{}'.format(obs_label)) hResponse4D.GetAxis(1).SetRangeUser(truth_pt_bin_array[0], truth_pt_bin_array[-1]) - + det_pt_bin_array = getattr(self, 'det_pt_bin_array_{}'.format(obs_label)) hResponse4D.GetAxis(0).SetRangeUser(det_pt_bin_array[0], det_pt_bin_array[-1]) @@ -1014,7 +1139,7 @@ def plot_correlation_coefficients(self, covariance_matrix, jetR, obs_label, i): nBinsX = covariance_matrix.GetNrows() nBinsY = covariance_matrix.GetNcols() - + correlation_coefficient_matrix = ROOT.TH2D('correlation_coefficient_matrix', 'correlation_coefficient_matrix', nBinsX, 0, nBinsX, nBinsY, 0, nBinsY) correlation_coefficient_matrix.GetXaxis().SetTitle('bin #') correlation_coefficient_matrix.GetYaxis().SetTitle('bin #') @@ -1026,7 +1151,7 @@ def plot_correlation_coefficients(self, covariance_matrix, jetR, obs_label, i): for ybin in range(0, nBinsY): varianceY = covariance_matrix(ybin, ybin) sigmaY = np.sqrt(varianceY) - + covXY = covariance_matrix(xbin, ybin) if sigmaX > 0 and sigmaY > 0: Cxy = covXY / (sigmaX * sigmaY) @@ -1047,7 +1172,8 @@ def plot_correlation_coefficients(self, covariance_matrix, jetR, obs_label, i): ################################################################################################# def unfolding_checks(self, jetR, obs_label, obs_setting, grooming_setting): - # Smear MC truth spectrum according to the error bars on the measured spectrum, for closure test + # Smear detector-level MC spectrum according to the error bars on the + # measured data spectrum, for closure tests name_data = getattr(self, 'name_data_rebinned_R{}_{}'.format(jetR, obs_label)) hData = getattr(self, name_data) hMC_Det = getattr(self, 'hMC_Det_R{}_{}'.format(jetR, obs_label)) @@ -1055,6 +1181,11 @@ def unfolding_checks(self, jetR, obs_label, obs_setting, 
grooming_setting): measuredErrors = self.getMeasuredErrors(hData) self.smearSpectrum(hMC_Det, measuredErrors) + for s in [self.shape_variation_parameter1, self.shape_variation_parameter2]: + hMC_Det = getattr(self, 'hMC_Det_R{}_{}_shape{}'.format( + jetR, obs_label, str(s).replace('.', ''))) + self.smearSpectrum(hMC_Det, measuredErrors) + # Select final regularization parameter if self.use_max_reg_param: reg_param_final = self.max_reg_param @@ -1072,15 +1203,18 @@ def unfolding_checks(self, jetR, obs_label, obs_setting, grooming_setting): # Unfold the smeared det-level result with response, and compare to truth-level MC. self.statistical_closure_test(i, jetR, obs_label, obs_setting, grooming_setting) - + # Scale the shape of the det-level and truth-level spectra (by the same scaling as the prior), # and compare the unfolded MC det-level result to truth-level MC. self.shape_closure_test(i, jetR, obs_label, obs_setting, grooming_setting) - - # Plot thermal closure test - if self.thermal_model: - - self.plot_thermal_closure_test(jetR, obs_label, obs_setting, grooming_setting, reg_param_final) + + # Scale the prior distribution and unfold with nominal det/truth MC + self.prior_closure_test(i, jetR, obs_label, obs_setting, grooming_setting) + + # Plot thermal closure test + if self.thermal_model: + + self.plot_thermal_closure_test(i, jetR, obs_label, obs_setting, grooming_setting) ################################################################################################# # Apply RM to unfolded result, and check that I obtain measured spectrum (simple technical check) @@ -1090,7 +1224,7 @@ def refolding_test(self, i, jetR, obs_label, obs_setting, grooming_setting): response = getattr(self, 'roounfold_response_R{}_{}'.format(jetR, obs_label)) hUnfolded = getattr(self, 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, i)).Clone() hUnfolded.SetName('hUnfolded_{}_R{}_{}_{}-clone'.format(self.observable, jetR, obs_label, i)) - + # Undo the kinematic 
efficiency correction -- we don't want to apply it for the refolding test if not self.use_miss_fake: hKinematicEfficiency = getattr(self, 'hKinematicEfficiency_R{}_{}'.format(jetR, obs_label)) @@ -1130,18 +1264,17 @@ def plot_obs_refolded_slice(self, hFoldedTruth, i, jetR, obs_label, obs_setting, min_pt_det, max_pt_det)) legendTitle = '' - h1LegendLabel = 'Folded truth, {} = {}'.format(self.reg_param_name,i) + h1LegendLabel = 'Folded truth' h2LegendLabel = 'Measured data' ratioYAxisTitle = 'Folded truth / Measured' output_dir = getattr(self, 'output_dir_Test_Refolding') - outf_name = 'hFoldedTruth_R{}_{}_{}-{}_{}{}'.format(self.utils.remove_periods(jetR), - obs_label, int(min_pt_det), - int(max_pt_det), i, self.file_format) + outf_name = 'hFoldedTruth_R{}_{}_{}-{}_{}{}'.format( + self.utils.remove_periods(jetR), obs_label, int(min_pt_det), int(max_pt_det), i, self.file_format) outf_name = os.path.join(output_dir, outf_name) - self.plot_obs_ratio(hFolded_obs, hData_obs, None, self.ytitle, ratioYAxisTitle, - int(min_pt_det), int(max_pt_det), jetR, obs_label, obs_setting, - grooming_setting, outf_name, 'width', legendTitle, - h1LegendLabel, h2LegendLabel, min_bin_for_normalization=2) + self.plot_obs_ratio( + hFolded_obs, hData_obs, None, self.ytitle, ratioYAxisTitle, int(min_pt_det), int(max_pt_det), + jetR, obs_label, obs_setting, grooming_setting, outf_name, 'width', legendTitle, + h1LegendLabel, h2LegendLabel, plotTitle="Refolding Test", reg_param=i, min_bin_for_normalization=2) ################################################################################################# # Plot refolding test, for pt dimension @@ -1162,16 +1295,17 @@ def plot_pt_refolded_slice(self, hFoldedTruth, i, jetR, obs_label, obs_setting, min_pt_det, max_pt_det)) legendTitle = '' - h1LegendLabel = 'Folded truth, {} = {}'.format(self.reg_param_name,i) + h1LegendLabel = 'Folded truth' h2LegendLabel = 'Measured data' ratioYAxisTitle = 'Folded truth / Measured' output_dir = getattr(self, 
'output_dir_Test_Refolding') outf_name = 'hFoldedTruth_pt_R{}_{}_{}{}'.format(self.utils.remove_periods(jetR), obs_label, i, self.file_format) outf_name = os.path.join(output_dir, outf_name) - self.plot_obs_ratio(hFolded_pt, hData_pt, None, self.ytitle, ratioYAxisTitle, 0, 0, - jetR, obs_label, obs_setting, grooming_setting, outf_name, - 'width', legendTitle, h1LegendLabel, h2LegendLabel) + self.plot_obs_ratio( + hFolded_pt, hData_pt, None, self.ytitle, ratioYAxisTitle, 0, 0, jetR, obs_label, obs_setting, + grooming_setting, outf_name, 'width', legendTitle, h1LegendLabel, h2LegendLabel, + plotTitle="Refolding Test", reg_param=i) ################################################################################################# # Statistical closure test: Smear data, then unfold and compare to original truth @@ -1209,17 +1343,17 @@ def plot_obs_closure_slice(self, hUnfolded, hMC_Truth, i, jetR, obs_label, obs_s hUnfolded.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) hUnfolded_obs = hUnfolded.ProjectionY() - hUnfolded_obs.SetName('hUnfolded_obs_R{}_{}_{}_{}-{}'.format(jetR, obs_label, i, - min_pt_truth, max_pt_truth)) + hUnfolded_obs.SetName('hUnfolded_obs_R{}_{}_{}_{}-{}'.format( + jetR, obs_label, i, min_pt_truth, max_pt_truth)) hMC_Truth.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) hMCTruth_obs = hMC_Truth.ProjectionY() - hMCTruth_obs.SetName('hMCTruth_obs_R{}_{}_{}_{}-{}'.format(jetR, obs_label, i, - min_pt_truth, max_pt_truth)) + hMCTruth_obs.SetName('hMCTruth_obs_R{}_{}_{}_{}-{}'.format( + jetR, obs_label, i, min_pt_truth, max_pt_truth)) legendTitle = '' - h1LegendLabel = 'Unfolded MC-det, {} = {}'.format(self.reg_param_name,i) - h2LegendLabel = 'MC-truth' + h1LegendLabel = 'Unfolded MC det' + h2LegendLabel = 'MC truth' ratioYAxisTitle = 'Unfolded MC det / Truth' output_dir = getattr(self, 'output_dir_Test_{}Closure{}'.format(option, self.utils.remove_periods(suffix))) @@ -1227,10 +1361,10 @@ def plot_obs_closure_slice(self, hUnfolded, hMC_Truth, i, 
jetR, obs_label, obs_s obs_label, int(min_pt_truth), int(max_pt_truth), i, self.file_format) outf_name = os.path.join(output_dir, outf_name) - self.plot_obs_ratio(hUnfolded_obs, hMCTruth_obs, None, self.ytitle, - ratioYAxisTitle, min_pt_truth, max_pt_truth, jetR, - obs_label, obs_setting, grooming_setting, outf_name, - 'width', legendTitle, h1LegendLabel, h2LegendLabel) + self.plot_obs_ratio( + hUnfolded_obs, hMCTruth_obs, None, self.ytitle, ratioYAxisTitle, min_pt_truth, max_pt_truth, + jetR, obs_label, obs_setting, grooming_setting, outf_name, 'width', legendTitle, + h1LegendLabel, h2LegendLabel, plotTitle="%s Closure Test" % option, reg_param=i) ################################################################################################# # Plot closure test, for pt dimension @@ -1248,27 +1382,28 @@ def plot_pt_closure_slice(self, hUnfolded, hMC_Truth, i, jetR, obs_label, hMCTruth_pt.SetName('hMCTruth_pt_R{}_{}_{}_'.format(jetR, obs_label, i)) legendTitle = '' - h1LegendLabel = 'Unfolded MC-det, {} = {}'.format(self.reg_param_name,i) - h2LegendLabel = 'MC-truth' + h1LegendLabel = 'Unfolded MC det' + h2LegendLabel = 'MC truth' ratioYAxisTitle = 'Unfolded MC det / Truth' output_dir = getattr(self, 'output_dir_Test_{}Closure{}'.format(option, self.utils.remove_periods(suffix))) outf_name = 'hClosure_pt_R{}_{}_{}{}'.format(self.utils.remove_periods(jetR), obs_label, i, self.file_format) outf_name = os.path.join(output_dir, outf_name) - self.plot_obs_ratio(hUnfolded_pt, hMCTruth_pt, None, self.ytitle, ratioYAxisTitle, - 0, 0, jetR, obs_label, obs_setting, grooming_setting, outf_name, - 'width', legendTitle, h1LegendLabel, h2LegendLabel) + self.plot_obs_ratio( + hUnfolded_pt, hMCTruth_pt, None, self.ytitle, ratioYAxisTitle, 0, 0, jetR, obs_label, + obs_setting, grooming_setting, outf_name, 'width', legendTitle, h1LegendLabel, h2LegendLabel, + plotTitle="%s Closure Test" % option, reg_param=i) 
################################################################################################# # Scale the shape of the det-level and truth-level spectra (by the same scaling as the prior), # and compare the unfolded MC det-level result to truth-level MC. ################################################################################################# def shape_closure_test(self, i, jetR, obs_label, obs_setting, grooming_setting): - + self.shape_closure_test_single(i, jetR, obs_label, obs_setting, grooming_setting, self.shape_variation_parameter1) - + self.shape_closure_test_single(i, jetR, obs_label, obs_setting, grooming_setting, self.shape_variation_parameter2) @@ -1278,17 +1413,55 @@ def shape_closure_test(self, i, jetR, obs_label, obs_setting, grooming_setting): ################################################################################################# def shape_closure_test_single(self, i, jetR, obs_label, obs_setting, grooming_setting, shape_variation_parameter): - # Unfold smeared det-level spectrum with RM + # Obtain nominal RM for doing the unfolding response = getattr(self, 'roounfold_response_R{}_{}'.format(jetR, obs_label)) - hMC_Det_original = getattr(self, 'hMC_Det_R{}_{}'.format(jetR, obs_label)) - hMC_Det = hMC_Det_original.Clone('{}_shape'.format(hMC_Det_original.GetName())) - - hMC_Truth_original = getattr(self, 'hMC_Truth_R{}_{}'.format(jetR, obs_label)) - hMC_Truth = hMC_Truth_original.Clone('{}_shape'.format(hMC_Truth_original.GetName())) - # Scale the det-level and truth-level MC by the prior variation - self.utils.scale_by_prior(hMC_Det, shape_variation_parameter) - self.utils.scale_by_prior(hMC_Truth, shape_variation_parameter) + # Use prior-scaled RM projections as the det and truth distributions + scaled_response = getattr(self, getattr(self, 'name_thn_rebinned_shape{}_R{}_{}'.format( + self.utils.remove_periods(self.shape_variation_parameter1), jetR, obs_label))) + hMC_Truth = getattr(self, 'hMC_Truth_R{}_{}_shape{}'.format( + 
jetR, obs_label, str(shape_variation_parameter).replace('.', ''))) + hMC_Det = getattr(self, 'hMC_Det_R{}_{}_shape{}'.format( + jetR, obs_label, str(shape_variation_parameter).replace('.', ''))) + + unfold2 = ROOT.RooUnfoldBayes(response, hMC_Det, i) + hUnfolded2 = unfold2.Hreco() # Produces the truth distribution, with errors, PerBin + + for bin in range(0, len(self.pt_bins_reported) - 1): + min_pt_truth = self.pt_bins_reported[bin] + max_pt_truth = self.pt_bins_reported[bin+1] + + self.plot_obs_closure_slice(hUnfolded2, hMC_Truth, i, jetR, obs_label, + obs_setting, grooming_setting, min_pt_truth, max_pt_truth, option='Shape', + suffix=shape_variation_parameter) + + # Closure test for pt dimension + self.plot_pt_closure_slice( + hUnfolded2, hMC_Truth, i, jetR, obs_label, obs_setting, grooming_setting, + self.pt_bins_reported[0], self.pt_bins_reported[-1], option='Shape', + suffix=shape_variation_parameter) + + ################################################################################################# + # Scale the prior distribution and unfold with nominal det/truth MC + ################################################################################################# + def prior_closure_test(self, i, jetR, obs_label, obs_setting, grooming_setting): + + self.prior_closure_test_single(i, jetR, obs_label, obs_setting, grooming_setting, + self.shape_variation_parameter1) + + self.prior_closure_test_single(i, jetR, obs_label, obs_setting, grooming_setting, + self.shape_variation_parameter2) + + ################################################################################################# + # Scale the prior distribution and unfold with nominal det/truth MC + ################################################################################################# + def prior_closure_test_single(self, i, jetR, obs_label, obs_setting, grooming_setting, shape_variation_parameter): + + # Unfold smeared det-level spectrum using prior-varied RM + response = getattr(self, 
'roounfold_response_shape{}_R{}_{}'.format( + self.utils.remove_periods(shape_variation_parameter), jetR, obs_label)) + hMC_Det = getattr(self, 'hMC_Det_R{}_{}'.format(jetR, obs_label)) + hMC_Truth = getattr(self, 'hMC_Truth_R{}_{}'.format(jetR, obs_label)) unfold2 = ROOT.RooUnfoldBayes(response, hMC_Det, i) hUnfolded2 = unfold2.Hreco() # Produces the truth distribution, with errors, PerBin @@ -1299,51 +1472,41 @@ def shape_closure_test_single(self, i, jetR, obs_label, obs_setting, grooming_se self.plot_obs_closure_slice(hUnfolded2, hMC_Truth, i, jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, - max_pt_truth, option='Shape', suffix=shape_variation_parameter) + max_pt_truth, option='Prior', suffix=shape_variation_parameter) # Closure test for pt dimension - self.plot_pt_closure_slice(hUnfolded2, hMC_Truth, i, jetR, obs_label, - obs_setting, grooming_setting, - self.pt_bins_reported[0], self.pt_bins_reported[-1], - option='Shape', suffix=shape_variation_parameter) + self.plot_pt_closure_slice( + hUnfolded2, hMC_Truth, i, jetR, obs_label, obs_setting, grooming_setting, + self.pt_bins_reported[0], self.pt_bins_reported[-1], option='Prior', + suffix=shape_variation_parameter) ################################################################################################# # Plot thermal closure test: unfolded result / truth ################################################################################################# - def plot_thermal_closure_test(self, jetR, obs_label, obs_setting, grooming_setting, reg_param_final): - + def plot_thermal_closure_test(self, i, jetR, obs_label, obs_setting, grooming_setting): + # Get MC truth hMC_Truth = getattr(self, 'hMC_Truth_R{}_{}'.format(jetR, obs_label)) - + # Loop through pt bins n_pt_bins_truth = getattr(self, 'n_pt_bins_truth_{}'.format(obs_label)) truth_pt_bin_array = getattr(self, 'truth_pt_bin_array_{}'.format(obs_label)) for bin in range(1, n_pt_bins_truth-1): min_pt_truth = truth_pt_bin_array[bin] 
max_pt_truth = truth_pt_bin_array[bin+1] - + # Get unfolded result - hUnfolded_obs = self.get_unfolded_result(jetR, obs_label, reg_param_final, min_pt_truth, - max_pt_truth, scaling_option='') - - # Get MC truth projection - hMC_Truth.GetXaxis().SetRangeUser(min_pt_truth, max_pt_truth) - hMCTruth_obs = hMC_Truth.ProjectionY() - hMCTruth_obs.SetName('hMCTruth_obs_R{}_{}_{}_{}-{}'.format(jetR, obs_label, reg_param_final, - min_pt_truth, max_pt_truth)) - + hUnfolded = getattr(self, 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, i)) + # Plot ratio - legendTitle = '' - h1LegendLabel = 'Unfolded result' - h2LegendLabel = 'MC-truth' - ratioYAxisTitle = 'Unfolded / Truth' - output_dir = getattr(self, 'output_dir_Test_ThermalClosure') - outf_name = 'hThermalClosure_R{}_{}_{}-{}{}'.format(self.utils.remove_periods(jetR), - obs_label, min_pt_truth, max_pt_truth, self.file_format) - outf_name = os.path.join(output_dir, outf_name) - self.plot_obs_ratio(hUnfolded_obs, hMCTruth_obs, None, self.ytitle, ratioYAxisTitle, - min_pt_truth, max_pt_truth, jetR, obs_label, obs_setting, grooming_setting, outf_name, - 'width', legendTitle, h1LegendLabel, h2LegendLabel) + self.plot_obs_closure_slice( + hUnfolded, hMC_Truth, i, jetR, obs_label, obs_setting, + grooming_setting, min_pt_truth, max_pt_truth, option='Thermal') + + # Closure test for pt dimension + self.plot_pt_closure_slice( + hUnfolded, hMC_Truth, i, jetR, obs_label, obs_setting, grooming_setting, + self.pt_bins_reported[0], self.pt_bins_reported[-1], option='Thermal') ################################################################################################# # Get errors from measured spectrum, stored as dictionary {bin:error} @@ -1390,33 +1553,35 @@ def smearSpectrum(self, h, measuredErrors): ################################################################################################# # Get MC-det 2D projection 
################################################################################################# - def get_MCdet2D(self, jetR, obs_label): + def get_MCdet2D(self, jetR, obs_label, name, shape_var=None): # (pt-det, pt-true, obs-det, obs-true) - name_response = getattr(self, 'name_thn_rebinned_R{}_{}'.format(jetR, obs_label)) + name_response = self.utils.name_thn_rebinned(self.observable, jetR, obs_label) + \ + "_shape" + str(shape_var).replace('.', '') if shape_var else \ + getattr(self, 'name_thn_rebinned_R{}_{}'.format(jetR, obs_label)) hResponse = getattr(self, name_response) - hResponse4D = hResponse.Clone() - hResponse4D.SetName('{}_clone'.format(hResponse4D.GetName())) + hResponse4D = hResponse.Clone('{}_clone'.format(hResponse.GetName())) hMC_Det = hResponse4D.Projection(2,0) - hMC_Det.SetName('hMC_Det_R{}_{}'.format(jetR, obs_label)) + hMC_Det.SetName(name) return hMC_Det ################################################################################################# # Get MC-det 2D projection ################################################################################################# - def get_MCtruth2D(self, jetR, obs_label): + def get_MCtruth2D(self, jetR, obs_label, name, shape_var=None): # (pt-det, pt-true, obs-det, obs-true) - name_response = getattr(self, 'name_thn_rebinned_R{}_{}'.format(jetR, obs_label)) + name_response = self.utils.name_thn_rebinned(self.observable, jetR, obs_label) + \ + "_shape" + str(shape_var).replace('.', '') if shape_var else \ + getattr(self, 'name_thn_rebinned_R{}_{}'.format(jetR, obs_label)) hResponse = getattr(self, name_response) - hResponse4D = hResponse.Clone() - hResponse4D.SetName('{}_clone'.format(hResponse4D.GetName())) + hResponse4D = hResponse.Clone('{}_clone'.format(hResponse.GetName())) hMC_Truth = hResponse4D.Projection(3,1) - hMC_Truth.SetName('hMC_Truth_R{}_{}'.format(jetR, obs_label)) + hMC_Truth.SetName(name) return hMC_Truth 
################################################################################################# @@ -1426,6 +1591,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, max_pt_det, jetR, obs_label, obs_setting, grooming_setting, outputFilename, scalingOptions = "", legendTitle = "", hLegendLabel = "", h2LegendLabel = "", h3LegendLabel = "", + plotTitle = "", reg_param = None, yRatioMax = 2.2, min_bin_for_normalization=1): self.utils.set_plotting_options() @@ -1440,6 +1606,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, pad1.SetTopMargin(0.05) if '_pt_' in outputFilename: pad1.SetLogy() + pad1.SetTicks(2, 2) pad1.Draw() pad1.cd() @@ -1457,7 +1624,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, h.Scale(1./integral) if '_pt_' in outputFilename: - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{p}_{T}}') + h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{p}_{T}^{ch jet}}') else: h.GetYaxis().SetTitle(yAxisTitle) @@ -1475,7 +1642,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, h2.SetLineColor(4) h2.SetLineWidth(2) h2.SetLineStyle(1) - + # First scale by bin width -- then normalize by integral # (where integral weights by bin width) h2.Scale(1., scalingOptions) @@ -1489,7 +1656,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, h3.SetLineColor(2) h3.SetLineWidth(2) h3.SetLineStyle(1) - + # First scale by bin width -- then normalize by integral # (where integral weights by bin width) h3.Scale(1., scalingOptions) @@ -1504,6 +1671,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, pad2.SetBottomMargin(0.35) pad2.SetLeftMargin(0.2) pad2.SetRightMargin(0.05) + pad2.SetTicks(2, 2) pad2.Draw() pad2.cd() @@ -1519,7 +1687,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, hRatio.GetXaxis().SetLabelFont(43) hRatio.GetXaxis().SetLabelSize(20) if '_pt_' in outputFilename: - 
hRatio.GetXaxis().SetTitle('#it{p}_{T,jet}') + hRatio.GetXaxis().SetTitle('#it{p}_{T}^{ch jet} (GeV/#it{c})') else: hRatio.GetXaxis().SetTitle(xAxisTitle) @@ -1534,7 +1702,7 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, max= hRatio.GetBinContent(hRatio.GetMaximumBin()) #automatic zoom-in for a very small scatter of the points if min>0.5 and max<1.5: - hRatio.GetYaxis().SetRangeUser(0.5,1.5) + hRatio.GetYaxis().SetRangeUser(0.5, 1.5) elif yRatioMax>2: hRatio.GetYaxis().SetRangeUser(0,yRatioMax) else: @@ -1549,15 +1717,17 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, hRatio3.SetMarkerStyle(21) hRatio3.SetMarkerColor(2) hRatio3.Draw("P E same") - - line = ROOT.TLine(self.xmin,1,self.xmax,1) + + xmin = hRatio.GetXaxis().GetBinLowEdge(1) + xmax = hRatio.GetXaxis().GetBinLowEdge(1+hRatio.GetNbinsX()) + line = ROOT.TLine(xmin, 1, xmax, 1) line.SetLineColor(1) line.SetLineStyle(2) line.Draw('same') pad1.cd() - leg2 = ROOT.TLegend(0.55,0.7,0.8,0.93,legendTitle) + leg2 = ROOT.TLegend(0.65,0.7,0.9,0.93,legendTitle) leg2.SetFillColor(10) leg2.SetBorderSize(0) leg2.SetFillStyle(0) @@ -1569,34 +1739,51 @@ def plot_obs_ratio(self, h, h2, h3, yAxisTitle, ratioYAxisTitle, min_pt_det, leg2.AddEntry(h2, h2LegendLabel, "l") leg2.Draw("same") + y_pos = 0.85 + if plotTitle: + text_latex = ROOT.TLatex() + text_latex.SetNDC() + text_latex.DrawLatex(0.25, y_pos, plotTitle) + y_pos -= 0.07 + + if reg_param: + text_latex = ROOT.TLatex() + text_latex.SetNDC() + text = "%s = %s" % (self.reg_param_name, str(reg_param)) + text_latex.DrawLatex(0.25, y_pos, text) + y_pos -= 0.07 + if not '_pt_' in outputFilename: text_latex = ROOT.TLatex() text_latex.SetNDC() - text = str(min_pt_det) + ' < #it{p}_{T,ch jet} < ' + str(max_pt_det) - text_latex.DrawLatex(0.25, 0.85, text) + text = str(min_pt_det) + ' < #it{p}_{T}^{ch jet} < ' + \ + str(max_pt_det) + ' GeV/#it{c}' + text_latex.DrawLatex(0.25, y_pos, text) + y_pos -= 0.07 text_latex = 
ROOT.TLatex() text_latex.SetNDC() text = '#it{R} = ' + str(jetR) - text_latex.DrawLatex(0.25, 0.78, text) - subobs_label = self.utils.formatted_subobs_label(self.observable) if subobs_label: - text = '{} = {}'.format(subobs_label, obs_setting) - text_latex.DrawLatex(0.25, 0.71, text) + text += ', {} = {}'.format(subobs_label, obs_setting) + text_latex.DrawLatex(0.25, y_pos, text) + y_pos -= 0.07 if grooming_setting: text = self.utils.formatted_grooming_label(grooming_setting) - text_latex.DrawLatex(0.25, 0.64, text) + text_latex.DrawLatex(0.25, y_pos, text) + y_pos -= 0.07 c.SaveAs(outputFilename) c.Close() - + if 'ThermalClosure' in outputFilename: - fname = 'nonclosureR{}_{}_{}_{}-{}.root'.format(jetR, obs_setting, grooming_setting, int(min_pt_det), int(max_pt_det)) + fname = 'nonclosureR{}_{}_{}_{}-{}.root'.format( + jetR, obs_setting, grooming_setting, int(min_pt_det), int(max_pt_det)) outf_name = os.path.join(getattr(self, 'output_dir_Test_ThermalClosure'), fname) - f = ROOT.TFile(outf_name, 'RECREATE') - hRatio.Write('hNonclosureRatio') + f = ROOT.TFile(outf_name, 'UPDATE') + hRatio.Write('hNonclosureRatio_n'+str(reg_param), ROOT.TFile.kOverwrite) f.Close() #--------------------------------------------------------------------------------------------------- diff --git a/pyjetty/alice_analysis/analysis/user/substructure/run_analysis.py b/pyjetty/alice_analysis/analysis/user/substructure/run_analysis.py index 7e1f87710..50a879ac7 100644 --- a/pyjetty/alice_analysis/analysis/user/substructure/run_analysis.py +++ b/pyjetty/alice_analysis/analysis/user/substructure/run_analysis.py @@ -122,12 +122,15 @@ def initialize_config(self): if 'max_reg_param' in self.obs_config_dict['common_settings']: self.max_reg_param = self.obs_config_dict['common_settings']['max_reg_param'] if self.max_reg_param < 3: - print("ERROR: The minimum number of iterations has been set to 3.", + raise ValueError("ERROR: The minimum number of iterations has been set to 3. 
" + \ "Please set max_reg_param to a value >= 3.") - raise ValueError() self.use_max_reg_param = True self.reg_param_name = 'n_iter' + # Vary the regularization parameter by amount set in config + self.reg_param_variation = self.obs_config_dict["common_settings"]["reg_param_variation"] if \ + "reg_param_variation" in self.obs_config_dict["common_settings"] else 2 + # Retrieve histogram binnings for each observable setting for i, _ in enumerate(self.obs_subconfig_list): @@ -309,10 +312,8 @@ def perform_unfolding(self): R_max = self.R_max2 elif systematic == 'prong_matching': prong_matching_response = True - elif systematic == 'fastsim_generator0': - response = self.fastsim_response_list[0] - elif systematic == 'fastsim_generator1': - response = self.fastsim_response_list[1] + elif 'fastsim_generator' in systematic: + response = self.fastsim_response_list[int(systematic[-1])] elif systematic == 'random_mass': data = self.randmass_data response = self.randmass_response @@ -325,15 +326,15 @@ def perform_unfolding(self): truncation=truncation, binning=binning, R_max=R_max, prong_matching_response=prong_matching_response, use_miss_fake=self.use_miss_fake) analysis.roounfold_obs() - - # Unfold thermal closure test - if self.do_thermal_closure: - + + # Unfold thermal closure test (for main R_max only) + if self.do_thermal_closure and R_max == self.R_max: + output_dir = getattr(self, 'output_dir_thermal_closure') rebin_response = self.check_rebin_response(output_dir) - + analysis = roounfold_obs.Roounfold_Obs( - self.observable, self.fThermal, self.fThermal, self.config_file, output_dir, + self.observable, self.main_data, self.fThermal, self.config_file, output_dir, self.file_format, rebin_response=rebin_response, R_max=R_max, thermal_model = True, use_miss_fake=self.use_miss_fake) analysis.roounfold_obs() @@ -478,17 +479,17 @@ def compute_systematics(self, jetR, obs_label, obs_setting, grooming_setting): self.compute_obs_systematic(jetR, obs_label, obs_setting, 
grooming_setting, reg_param_final, min_pt_truth, max_pt_truth, minbin, maxbin, final=True) - + # Set SD tagging fraction for final reg parameter if grooming_setting and 'sd' in grooming_setting: f_tagging_name = 'tagging_fraction_R{}_{}_{}-{}'.format( jetR, obs_label, min_pt_truth, max_pt_truth) f_tagged = getattr(self, '{}_{}'.format(f_tagging_name, reg_param_final)) setattr(self, f_tagging_name, f_tagged) - + # Copy plots of final reg param, for convenience self.copy_unfolding_tests(jetR, obs_label, reg_param_final, min_pt_truth, max_pt_truth) - + #---------------------------------------------------------------------- def copy_unfolding_tests(self, jetR, obs_label, reg_param_final, min_pt, max_pt): @@ -520,40 +521,38 @@ def copy_unfolding_tests(self, jetR, obs_label, reg_param_final, min_pt, max_pt) new_name = 'hFoldedTruth_{}_{}-{}_final{}'.format( label, min_det_pt, max_det_pt, self.file_format) shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) - - # Copy statistical closure test - outputdir_test = os.path.join(outputdir_main, 'Test_StatisticalClosure') - old_name = 'hClosure_pt_{}_{}{}'.format(label, reg_param_final, self.file_format) - new_name = 'hStatisticalClosure_pt_{}_final{}'.format(label, self.file_format) - shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) - old_name = 'hClosure_{}_{}-{}_{}{}'.format( - label, min_pt, max_pt, reg_param_final, self.file_format) - new_name = 'hStatisticalClosure_{}_{}-{}_final{}'.format( - label, min_pt, max_pt, self.file_format) - shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) - + + # Copy closure tests that don't depend on a shape parameter + for type in ["Statistical", "Thermal"]: + if type == "Thermal": + if "thermal_closure" in self.systematics_list: + outputdir_test = os.path.join(getattr(self, 'output_dir_thermal_closure'), 'Test_ThermalClosure') + else: + continue + else: + outputdir_test = 
os.path.join(outputdir_main, 'Test_%sClosure' % type) + old_name = 'hClosure_pt_{}_{}{}'.format(label, reg_param_final, self.file_format) + new_name = 'h{}Closure_pt_{}_final{}'.format(type, label, self.file_format) + shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) + old_name = 'hClosure_{}_{}-{}_{}{}'.format( + label, min_pt, max_pt, reg_param_final, self.file_format) + new_name = 'h{}Closure_{}_{}-{}_final{}'.format( + type, label, min_pt, max_pt, self.file_format) + shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) + # Copy shape closure test - parameter = self.utils.remove_periods(self.prior1_variation_parameter) - outputdir_test = os.path.join(outputdir_main, 'Test_ShapeClosure{}'.format(parameter)) - old_name = 'hClosure_pt_{}_{}{}'.format(label, reg_param_final, self.file_format) - new_name = 'hShapeClosure{}_pt_{}_final{}'.format(parameter, label, self.file_format) - shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) - old_name = 'hClosure_{}_{}-{}_{}{}'.format( - label, min_pt, max_pt, reg_param_final, self.file_format) - new_name = 'hShapeClosure{}_{}_{}-{}_final{}'.format( - parameter, label, min_pt, max_pt, self.file_format) - shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) - - parameter = self.utils.remove_periods(self.prior2_variation_parameter) - outputdir_test = os.path.join(outputdir_main, 'Test_ShapeClosure{}'.format(parameter)) - old_name = 'hClosure_pt_{}_{}{}'.format(label, reg_param_final, self.file_format) - new_name = 'hShapeClosure{}_pt_{}_final{}'.format(parameter, label, self.file_format) - shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) - old_name = 'hClosure_{}_{}-{}_{}{}'.format( - label, min_pt, max_pt, reg_param_final, self.file_format) - new_name = 'hShapeClosure{}_{}_{}-{}_final{}'.format( - parameter, label, min_pt, max_pt, self.file_format) - 
shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) + for type in ["Shape", "Prior"]: + for parameter in [self.utils.remove_periods(self.prior1_variation_parameter), \ + self.utils.remove_periods(self.prior2_variation_parameter)]: + outputdir_test = os.path.join(outputdir_main, 'Test_%sClosure%s' % (type, parameter)) + old_name = 'hClosure_pt_{}_{}{}'.format(label, reg_param_final, self.file_format) + new_name = 'h{}Closure{}_pt_{}_final{}'.format(type, parameter, label, self.file_format) + shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) + old_name = 'hClosure_{}_{}-{}_{}{}'.format( + label, min_pt, max_pt, reg_param_final, self.file_format) + new_name = 'h{}Closure{}_{}_{}-{}_final{}'.format( + type, parameter, label, min_pt, max_pt, self.file_format) + shutil.copy(os.path.join(outputdir_test, old_name), os.path.join(outputdir, new_name)) #---------------------------------------------------------------------- def load_2D_observables(self, jetR, obs_label, obs_setting, grooming_setting, reg_param): @@ -569,15 +568,17 @@ def load_2D_observables(self, jetR, obs_label, obs_setting, grooming_setting, re if systematic == 'main': # Get regularization parameter variations, and store as attributes - name = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param+2) + name = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param+self.reg_param_variation) self.retrieve_histo_and_set_attribute(name, f) - name = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param-2) + name = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param-self.reg_param_variation) self.retrieve_histo_and_set_attribute(name, f) #---------------------------------------------------------------------- def retrieve_histo_and_set_attribute(self, name, f, suffix = ''): h = f.Get(name) + if not h: + raise ValueError("Histogram with name %s not found in 
file %s" % (name, f.GetName())) h.SetDirectory(0) setattr(self, '{}{}'.format(name, suffix), h) @@ -602,13 +603,13 @@ def load_1D_observables(self, jetR, obs_label, obs_setting, grooming_setting, if systematic == 'main': # Get regularization parameter variations, and store as attributes - name2D = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param+2) + name2D = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param+self.reg_param_variation) name1D = 'hRegParam1_{}_R{}_{}_n{}_{}-{}'.format(self.observable, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth) hRegParam1 = self.get_obs_distribution(jetR, obs_label, name2D, name1D, reg_param, grooming_setting, min_pt_truth, max_pt_truth, minbin, maxbin) - name2D = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param-2) + name2D = 'hUnfolded_{}_R{}_{}_{}'.format(self.observable, jetR, obs_label, reg_param-self.reg_param_variation) name1D = 'hRegParam2_{}_R{}_{}_n{}_{}-{}'.format(self.observable, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth) hRegParam2 = self.get_obs_distribution(jetR, obs_label, name2D, name1D, reg_param, grooming_setting, @@ -621,68 +622,97 @@ def compute_obs_systematic(self, jetR, obs_label, obs_setting, grooming_setting, reg_param, min_pt_truth, max_pt_truth, minbin=None, maxbin=None, final=False): # Get main result - name = 'h{}_{}_R{}_{}_n{}_{}-{}'.format('main', self.observable, jetR, - obs_label, reg_param, min_pt_truth, max_pt_truth) + name = 'h{}_{}_R{}_{}_n{}_{}-{}'.format( + 'main', self.observable, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth) hMain = getattr(self, name) if final: # Also save under name without reg param, won't need this info later - name = 'h{}_{}_R{}_{}_{}-{}'.format('main', self.observable, jetR, - obs_label, min_pt_truth, max_pt_truth) + name = 'h{}_{}_R{}_{}_{}-{}'.format( + 'main', self.observable, jetR, obs_label, min_pt_truth, max_pt_truth) setattr(self, name, hMain) # Loop 
through all systematic variations, and take ratio to main result h_list = [] h_unfolding_list = [] + h_fastsim_list = [] + do_generator = False for systematic in self.systematics_list: - if final: + if not do_generator and 'generator' in systematic: + do_generator = True + + if final and 'generator' not in systematic: # ** For subjet JEWEL systematic, set a larger reg param so that it can converge reg_param_original = reg_param - if not self.is_pp and 'subjet_z' in self.observable and 'generator1' in systematic: + if not self.is_pp and 'subjet_z' in self.observable: reg_param = self.max_reg_param - h_systematic_ratio = self.retrieve_systematic(systematic, jetR, obs_label, - reg_param, min_pt_truth, max_pt_truth) + h_systematic_ratio = self.retrieve_systematic( + systematic, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth) # ** Reset reg param reg_param = reg_param_original else: - h_systematic_ratio = self.construct_systematic(systematic, hMain, jetR, obs_label, obs_setting, grooming_setting, - reg_param, min_pt_truth, max_pt_truth, minbin, maxbin) + h_systematic_ratio = self.construct_systematic( + systematic, hMain, jetR, obs_label, obs_setting, grooming_setting, + reg_param, min_pt_truth, max_pt_truth, minbin, maxbin) if h_systematic_ratio: if systematic in ['main', 'prior1', 'truncation', 'binning']: h_unfolding_list.append(h_systematic_ratio) + elif 'generator' in systematic: + if h_systematic_ratio: + h_fastsim_list.append(h_systematic_ratio) else: h_list.append(h_systematic_ratio) - + # Construct or retrieve unfolding uncertainty name = 'hSystematic_Unfolding_R{}_{}_n{}_{}-{}'.format( - self.utils.remove_periods(jetR), obs_label, - reg_param, int(min_pt_truth), int(max_pt_truth)) + self.utils.remove_periods(jetR), obs_label, reg_param, int(min_pt_truth), int(max_pt_truth)) if final: hSystematic_Unfolding = getattr(self, name) - name = 'hSystematic_Unfolding_R{}_{}_{}-{}'.format(self.utils.remove_periods(jetR), obs_label, - int(min_pt_truth), 
int(max_pt_truth)) + name = 'hSystematic_Unfolding_R{}_{}_{}-{}'.format( + self.utils.remove_periods(jetR), obs_label, int(min_pt_truth), int(max_pt_truth)) setattr(self, name, hSystematic_Unfolding) - else: hSystematic_Unfolding = self.construct_unfolding_uncertainty(h_unfolding_list) setattr(self, name, hSystematic_Unfolding) h_list.append(hSystematic_Unfolding) + if do_generator: + # Construct or retrieve generator uncertainty + name = 'hSystematic_generator_R{}_{}_n{}_{}-{}'.format( + self.utils.remove_periods(jetR), obs_label, reg_param, int(min_pt_truth), int(max_pt_truth)) + if final: + hSystematic_fastsim = getattr(self, name) + name = 'hSystematic_generator_R{}_{}_{}-{}'.format( + self.utils.remove_periods(jetR), obs_label, int(min_pt_truth), int(max_pt_truth)) + setattr(self, name, hSystematic_fastsim) + else: + # Get reference generator unfolded result (e.g. pythia fastsim) + name_reference = 'h{}_{}_R{}_{}_n{}_{}-{}'.format( + 'fastsim_generator0', self.observable, jetR, obs_label, reg_param, int(min_pt_truth), int(max_pt_truth)) + h_reference = getattr(self, name_reference) + + # Calculate systematic average for final fast simulation uncertainty + maxbin_adj = maxbin + 1 if (grooming_setting and maxbin) else maxbin + n_generators = len([i for i in self.systematics_list if 'generator' in i]) + hSystematic_fastsim = self.construct_systematic_average( + h_reference, 'fastsim_generator', jetR, obs_label, reg_param, min_pt_truth, max_pt_truth, + minbin, maxbin_adj, n_ratios=n_generators-1, takeMaxDev=False) + setattr(self, name, hSystematic_fastsim) + h_list.append(hSystematic_fastsim) + # Construct total systematic uncertainty: Add all systematic uncertainties in quadrature name = 'hSystematic_Total_R{}_{}_n{}_{}-{}'.format( - self.utils.remove_periods(jetR), obs_label, - reg_param, int(min_pt_truth), int(max_pt_truth)) + self.utils.remove_periods(jetR), obs_label, reg_param, int(min_pt_truth), int(max_pt_truth)) hSystematic_Total = 
self.add_in_quadrature(h_list, new_name=name) setattr(self, name, hSystematic_Total) # Attach total systematic to main result, and save as an attribute name = 'hResult_{}_systotal_R{}_{}_n{}_{}-{}'.format( - self.observable, jetR, obs_label, reg_param, - int(min_pt_truth), int(max_pt_truth)) + self.observable, jetR, obs_label, reg_param, int(min_pt_truth), int(max_pt_truth)) if grooming_setting and maxbin: hResult_sys = self.truncate_hist(hMain.Clone(), minbin, maxbin+1, name) else: @@ -701,7 +731,7 @@ def compute_obs_systematic(self, jetR, obs_label, obs_setting, grooming_setting, self.observable, jetR, obs_label, int(min_pt_truth), int(max_pt_truth)) setattr(self, name, hResult_sys) - + if self.debug_level > 0: name = 'hSystematic_Total_R{}_{}_{}-{}{}'.format( self.utils.remove_periods(jetR), obs_label, @@ -719,6 +749,13 @@ def compute_obs_systematic(self, jetR, obs_label, obs_setting, grooming_setting, jetR, obs_label, obs_setting, grooming_setting, min_pt_truth, max_pt_truth, minbin, maxbin, h_unfolding_list, hSystematic_Unfolding, suffix='Unfolding') + if do_generator: + # Plot fastsim uncertainties + self.plot_systematic_uncertainties( + jetR, obs_label, obs_setting, grooming_setting, + min_pt_truth, max_pt_truth, minbin, maxbin, h_fastsim_list, hSystematic_fastsim, suffix='fastsim_generator') + + #---------------------------------------------------------------------- # Retrieve a given systematic histogram def retrieve_systematic(self, systematic, jetR, obs_label, @@ -736,8 +773,8 @@ def retrieve_systematic(self, systematic, jetR, obs_label, sys_label = 'subtraction' else: return None - elif systematic in ['fastsim_generator0', 'fastsim_generator1']: - if systematic == 'fastsim_generator1': + elif 'fastsim_generator' in systematic: + if int(systematic[-1]) == (len(self.fastsim_response_list) - 1): sys_label = 'generator' else: return None @@ -765,7 +802,7 @@ def retrieve_systematic(self, systematic, jetR, obs_label, systematic, 
self.utils.remove_periods(jetR), obs_label, int(min_pt_truth), int(max_pt_truth), self.file_format) outputFilename = os.path.join(output_dir, name) - self.utils.plot_hist(h_systematic_ratio, outputFilename, 'P E') + self.utils.plot_hist(h_systematic_ratio, outputFilename, 'P E') return h_systematic_ratio @@ -773,10 +810,10 @@ def retrieve_systematic(self, systematic, jetR, obs_label, # Get systematic variation and save percentage difference as attribte def construct_systematic(self, systematic, hMain, jetR, obs_label, obs_setting, grooming_setting, reg_param, min_pt_truth, max_pt_truth, minbin, maxbin): - + # Set whether to store signed uncertainty value or absolute value # For now, only use signed uncertainty in cases where we don't average/combine multiple sources - signed = systematic in ['trkeff', 'fastsim_generator1'] + signed = (systematic == 'trkeff') or ('fastsim_generator' in systematic) if grooming_setting and maxbin: maxbin += 1 @@ -786,7 +823,7 @@ def construct_systematic(self, systematic, hMain, jetR, obs_label, obs_setting, h_systematic_ratio = self.construct_systematic_average( hMain, 'RegParam', jetR, obs_label, reg_param, min_pt_truth, max_pt_truth, minbin, maxbin, takeMaxDev=False) - + elif systematic in ['prior1', 'prior2']: if systematic == 'prior1': h_systematic_ratio = self.construct_systematic_average( @@ -794,23 +831,7 @@ def construct_systematic(self, systematic, hMain, jetR, obs_label, obs_setting, min_pt_truth, max_pt_truth, minbin, maxbin, takeMaxDev=True) else: return None - - # For model-depedence, take the difference between two fastsim generators - elif systematic in ['fastsim_generator0', 'fastsim_generator1']: - if systematic == 'fastsim_generator1': - # Get reference generator unfolded result (e.g. 
pythia fastsim) - name = 'h{}_{}_R{}_{}_n{}_{}-{}'.format( - 'fastsim_generator0', self.observable, jetR, obs_label, - reg_param, min_pt_truth, max_pt_truth) - h_reference = getattr(self, name) - - # Take the difference of generator to reference generator (e.g. herwig fastsim) - h_systematic_ratio = self.construct_systematic_percentage( - h_reference, 'fastsim_generator1', jetR, obs_label, - reg_param, min_pt_truth, max_pt_truth, minbin, maxbin, signed=signed) - else: - return None - + elif systematic in ['subtraction1', 'subtraction2']: if systematic == 'subtraction1': h_systematic_ratio = self.construct_systematic_average( @@ -818,34 +839,72 @@ def construct_systematic(self, systematic, hMain, jetR, obs_label, obs_setting, min_pt_truth, max_pt_truth, minbin, maxbin, takeMaxDev=True) else: return None - + + elif "generator" in systematic: + if systematic[-1] != '0': + # Get reference generator unfolded result (e.g. pythia fastsim) + name_reference = 'h{}_{}_R{}_{}_n{}_{}-{}'.format( + 'fastsim_generator0', self.observable, jetR, obs_label, + reg_param, int(min_pt_truth), int(max_pt_truth)) + h_reference = getattr(self, name_reference) + + h_systematic_ratio = self.construct_systematic_percentage( + h_reference, systematic, jetR, obs_label, reg_param, + min_pt_truth, max_pt_truth, minbin, maxbin, signed=signed) + else: + return None + elif systematic == 'thermal_closure': fname = 'nonclosureR{}_{}_{}_{}-{}.root'.format(jetR, obs_setting, grooming_setting, int(min_pt_truth), int(max_pt_truth)) outf_name = os.path.join(getattr(self, 'output_dir_thermal_closure'), 'Test_ThermalClosure') f_nonclosure = ROOT.TFile(os.path.join(outf_name, fname), 'READ') - h_systematic_ratio_temp = f_nonclosure.Get('hNonclosureRatio') + h_systematic_ratio_temp = f_nonclosure.Get('hNonclosureRatio_n' + str(reg_param)) h_systematic_ratio_temp.SetDirectory(0) f_nonclosure.Close() - + name_ratio = 'hSystematic_{}_{}_R{}_{}_n{}_{}-{}'.format( self.observable, systematic, jetR, obs_label, 
reg_param, min_pt_truth, max_pt_truth) self.change_to_per(h_systematic_ratio_temp) + self.subtract_statistics(h_systematic_ratio_temp) h_systematic_ratio_temp.SetMinimum(0.) h_systematic_ratio_temp.SetMaximum(1.5*h_systematic_ratio_temp.GetMaximum()) h_systematic_ratio = self.truncate_hist(h_systematic_ratio_temp, minbin, maxbin, name_ratio) setattr(self, name_ratio, h_systematic_ratio) - + else: h_systematic_ratio = self.construct_systematic_percentage( hMain, systematic, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth, minbin, maxbin, signed=signed) - + name = 'hSystematic_{}_{}_R{}_{}_n{}_{}-{}'.format(self.observable, systematic, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth) setattr(self, name, h_systematic_ratio) - + return h_systematic_ratio + #---------------------------------------------------------------------- + # "Subtract" statistical uncertainties from main data points + # Assumes (bin content)^2 = (bin error)^2 + (other)^2 + # If bin error > bin content, sets bin content to 0; else sets to "other" + def subtract_statistics(self, h, set_error_zero=False): + + for bin in range(1, h.GetNbinsX()+1): + + content = h.GetBinContent(bin) + error = h.GetBinError(bin) + + # Check if uncertainties are larger than bin content + if error > content: + h.SetBinContent(bin, 0) + if set_error_zero: + h.SetBinError(bin, 0) + continue + + new_content = math.sqrt(content * content - error * error) + h.SetBinContent(bin, new_content) + if set_error_zero: + h.SetBinError(bin, 0) + #---------------------------------------------------------------------- # Get systematic variation and save percentage difference as attribte def construct_systematic_percentage(self, hMain, systematic, jetR, @@ -856,9 +915,9 @@ def construct_systematic_percentage(self, hMain, systematic, jetR, reg_param, min_pt_truth, max_pt_truth) h_systematic = getattr(self, name) - if 'fastsim_generator1' in systematic: - systematic = 'generator' # For generator systematic, need to set label 
here - + #if 'fastsim_generator1' in systematic: + # systematic = 'generator' # For generator systematic, need to set label here + # Normalization #integral = hMain.Integral(2, hMain.GetNbinsX(), 'width') #hMain.Scale(1./integral) @@ -887,20 +946,18 @@ def construct_systematic_percentage(self, hMain, systematic, jetR, #---------------------------------------------------------------------- def construct_systematic_average(self, hMain, sys_label, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth, - minbin, maxbin, takeMaxDev=False): - - h_systematic_ratio1 = self.construct_systematic_percentage( - hMain, '{}1'.format(sys_label), jetR, obs_label, reg_param, - min_pt_truth, max_pt_truth, minbin, maxbin) - h_systematic_ratio2 = self.construct_systematic_percentage( - hMain, '{}2'.format(sys_label), jetR, obs_label, - reg_param, min_pt_truth, max_pt_truth, minbin, maxbin) - - name = 'hSystematic_{}_{}_R{}_{}_n{}_{}-{}'.format(self.observable, sys_label, jetR, obs_label, - reg_param, min_pt_truth, max_pt_truth) - h_systematic_ratio = self.build_average(h_systematic_ratio1, h_systematic_ratio2, - takeMaxDev=takeMaxDev) - h_systematic_ratio.SetName(name) + minbin, maxbin, n_ratios=2, takeMaxDev=False): + + ratios = [] + for i in range(1, n_ratios+1): + h_systematic_ratio = self.construct_systematic_percentage( + hMain, '%s%i' % (sys_label, i), jetR, obs_label, reg_param, + min_pt_truth, max_pt_truth, minbin, maxbin) + ratios.append(h_systematic_ratio) + + name = 'hSystematic_{}_{}_R{}_{}_n{}_{}-{}'.format( + self.observable, sys_label, jetR, obs_label, reg_param, min_pt_truth, max_pt_truth) + h_systematic_ratio = self.build_average(ratios, name, takeMaxDev=takeMaxDev) setattr(self, name, h_systematic_ratio) return h_systematic_ratio @@ -978,13 +1035,15 @@ def plot_systematic_uncertainties(self, jetR, obs_label, obs_setting, grooming_s myBlankHisto.SetXTitle( getattr(self, 'xtitle') ) myBlankHisto.GetYaxis().SetTitleOffset(1.5) myBlankHisto.SetYTitle('Systematic 
uncertainty (%)') - myBlankHisto.SetMaximum(2.7*h_total.GetMaximum(50)) - if not suffix=="Unfolding": - myBlankHisto.SetMinimum(-1.1*h_total.GetMaximum(50)) - else: - # Unfolding uncertainties do not go below 0 - myBlankHisto.SetMinimum(0) - myBlankHisto.Draw("E") + minrooti = 2 if grooming_setting else 1 + if minbin: + minrooti += minbin + y_list = [h_total.GetBinContent(i) for i in range(minrooti, n_bins_truth + minrooti)] + max_y = max(y_list) + min_y = 0 + # Unfolding uncertainties do not go below 0 + if not suffix == "Unfolding": + min_y = min(y_list) leg = ROOT.TLegend(0.67,0.6,0.8,0.92) self.utils.setup_legend(leg,0.04) @@ -997,17 +1056,21 @@ def plot_systematic_uncertainties(self, jetR, obs_label, obs_setting, grooming_s h.SetLineColor(self.ColorArray[i]) h.SetLineStyle(1) h.SetLineWidth(2) - if h.GetMaximum() > h_total.GetMaximum(): - myBlankHisto.SetMaximum(1.7*h.GetMaximum()) - - h.DrawCopy('P X0 same') + y_list = [h.GetBinContent(i) for i in range(minrooti, n_bins_truth + minrooti)] + new_max_y = max(y_list) + if new_max_y > max_y: + max_y = new_max_y + new_min_y = min(y_list) + if new_min_y < min_y: + min_y = new_min_y legend_label = '' for systematic in self.systematics_list: + if systematic in h.GetName(): + legend_label = systematic.replace('fastsim_', '') + if not legend_label: if 'Unfolding' in h.GetName(): legend_label = 'unfolding' - elif systematic in h.GetName(): - legend_label = systematic elif 'RegParam' in h.GetName(): legend_label = 'reg param' elif 'prior' in h.GetName(): @@ -1018,17 +1081,29 @@ def plot_systematic_uncertainties(self, jetR, obs_label, obs_setting, grooming_s legend_label = 'subtraction' leg.AddEntry(h, legend_label, 'P') + # Draw that now the maximum has been found + max_y = 1.9 * max([abs(min_y), max_y]) + min_y = 1.1 * min_y + myBlankHisto.SetMaximum(max_y) + myBlankHisto.SetMinimum(min_y) + myBlankHisto.Draw("E") + for i, h in enumerate(h_list): + if h: + h.SetMaximum(max_y) + h.SetMinimum(min_y) + h.DrawCopy('P X0 
same') + h_total.SetLineStyle(1) h_total.SetLineColor(1) h_total.SetLineWidth(2) h_total.DrawCopy('same hist') - leg.AddEntry(h_total, 'Total {}'.format(suffix), 'l') + leg.AddEntry(h_total, 'Total {}'.format(suffix.replace('fastsim_', '')), 'l') leg.Draw() text_latex = ROOT.TLatex() text_latex.SetNDC() - text = str(min_pt_truth) + ' < #it{p}_{T, ch jet} < ' + str(max_pt_truth) + text = str(min_pt_truth) + ' < #it{p}_{T}^{ch jet} < ' + str(max_pt_truth) + ' GeV/#it{c}' text_latex.DrawLatex(0.3, 0.85, text) text_latex = ROOT.TLatex() @@ -1069,27 +1144,29 @@ def plot_systematic_uncertainties(self, jetR, obs_label, obs_setting, grooming_s def truncate_hist(self, h, minbin, maxbin, new_name): length = h.GetNbinsX() - # Check if either minbin or maxbin exist, and if so set bin indices - if maxbin == None and minbin == None: - h.SetNameTitle(new_name, new_name) - return h - else: - if maxbin == None: - bin_range = range(minbin+1, length+2) - if minbin >= length: - raise ValueError(f"Min bin number {minbin} larger or equal to histogram size {length}") - if minbin < 1: - raise ValueError(f"Min bin number {minbin} cannot be less than 1") - - elif minbin == None: - bin_range = range(1, maxbin+2) - if maxbin >= length: - raise ValueError(f"Max bin number {maxbin} larger or equal to histogram size {length}") - if maxbin < 1: - raise ValueError(f"Max bin number {maxbin} cannot be less than 1") - - bin_edges = array('d', [h.GetXaxis().GetBinLowEdge(i) for i in bin_range]) - return h.Rebin(len(bin_edges)-1, new_name, bin_edges) + # Check if either minbin or maxbin exist, and if so set bin indices + if maxbin == None: + if minbin == None: + h.SetNameTitle(new_name, new_name) + return h + bin_range = range(minbin+1, length+2) + if minbin >= length: + raise ValueError(f"Min bin number {minbin} larger or equal to histogram size {length}") + if minbin < 1: + raise ValueError(f"Min bin number {minbin} cannot be less than 1") + + elif minbin == None: + if maxbin == length: + 
h.SetNameTitle(new_name, new_name) + return h + bin_range = range(1, maxbin+2) + if maxbin > length: + raise ValueError(f"Max bin number {maxbin} larger than histogram size {length}") + if maxbin < 1: + raise ValueError(f"Max bin number {maxbin} cannot be less than 1") + + bin_edges = array('d', [h.GetXaxis().GetBinLowEdge(i) for i in bin_range]) + return h.Rebin(len(bin_edges)-1, new_name, bin_edges) #---------------------------------------------------------------------- # Add a list of (identically-binned) histograms in quadrature, bin-by-bin @@ -1205,7 +1282,7 @@ def write_hepdata(self): min_pt_truth, max_pt_truth) # Write submission files - self.hepdata_submission.create_files(self.hepdata_dir) + self.hepdata_submission.create_files(self.hepdata_dir, remove_old=True) #---------------------------------------------------------------------- def add_hepdata_table(self, i, jetR, obs_label, obs_setting, grooming_setting, min_pt, max_pt): @@ -1226,8 +1303,8 @@ def add_hepdata_table(self, i, jetR, obs_label, obs_setting, grooming_setting, m hepdata_reader_systematics = hepdata_lib.RootFileReader(systematics_root_filename) # Define variables - h_name = 'hmain_{}_R{}_{}_{}-{}'.format(self.observable, jetR, obs_label, min_pt, max_pt) - if self.observable == 'ang': + h_name = 'hmain_{}_R{}_{}_{}-{}'.format(self.observable, jetR, obs_label, min_pt, max_pt).replace('__', '_') + if self.observable == 'ang' or self.observable == 'mass': h_name += '_trunc' h = hepdata_reader.read_hist_1d(h_name) @@ -1251,7 +1328,7 @@ def add_hepdata_table(self, i, jetR, obs_label, obs_setting, grooming_setting, m # Define uncertainties stat = hepdata_lib.Uncertainty('stat', is_symmetric=True) - stat.values = [float('{:.2g}'.format(dy)) for dy in h['dy']] + stat.values = h['dy'] #float('{:.2g}'.format(dy)) # Add tables to submission table.add_variable(x) @@ -1263,30 +1340,36 @@ def add_hepdata_table(self, i, jetR, obs_label, obs_setting, grooming_setting, m int(min_pt), int(max_pt)) 
h_sys_unfolding = hepdata_reader_systematics.read_hist_1d(getattr(self, name).GetName()) sys_unfolding = hepdata_lib.Uncertainty('sys,unfolding', is_symmetric=True) - sys_unfolding.values = ['{:.2g}%'.format(y) for y in h_sys_unfolding['y']] + #sys_unfolding_percent = h_sys_unfolding['y'] # Percent uncertainty + sys_unfolding.values = [h_sys_unfolding['y'][i] * y.values[i] / 100. for i in range(len(y.values))] y.add_uncertainty(sys_unfolding) - + + # Add generator systematic + name = 'hSystematic_generator_R{}_{}_{}-{}'.format(self.utils.remove_periods(jetR), obs_label, + int(min_pt), int(max_pt)) + h_sys_generator = hepdata_reader_systematics.read_hist_1d(getattr(self, name).GetName()) + sys_generator = hepdata_lib.Uncertainty('sys,generator', is_symmetric=True) + #sys_generator_percent = h_sys_generator['y'] # Percent uncertainty + sys_generator.values = [h_sys_generator['y'][i] * y.values[i] / 100. for i in range(len(y.values))] + y.add_uncertainty(sys_generator) + # Add systematic uncertainty breakdown for systematic in self.systematics_list: - - if systematic in ['main', 'prior1', 'truncation', 'binning']: + + if systematic in ['main', 'prior1', 'truncation', 'binning'] or 'generator' in systematic: continue - h_sys = self.retrieve_systematic(systematic, jetR, obs_label, - None, min_pt, max_pt) + h_sys = self.retrieve_systematic( + systematic, jetR, obs_label, None, min_pt, max_pt) if not h_sys: continue - - if 'generator' in systematic: - sys_label = 'generator' - else: - sys_label = systematic - + h_sys = hepdata_reader_systematics.read_hist_1d(h_sys.GetName()) - sys = hepdata_lib.Uncertainty('sys,{}'.format(sys_label), is_symmetric=True) - sys.values = ['{:.2g}%'.format(y) for y in h_sys['y']] + sys = hepdata_lib.Uncertainty('sys,{}'.format(systematic), is_symmetric=True) + #sys_percent = h_sys['y'] # Percent uncertainty + sys.values = [h_sys['y'][i] * y.values[i] / 100. 
for i in range(len(y.values))] y.add_uncertainty(sys) - + # Add table to the submission self.hepdata_submission.add_table(table) @@ -1295,7 +1378,8 @@ def set_hepdata_table_index(self, i, jetR, min_pt): index = 1 index += i - if self.observable == 'ang': + # Jet angularities in pp + if self.observable == 'ang' and self.is_pp: if np.isclose(jetR, 0.4): if np.isclose(min_pt, 40.): index += 8 @@ -1312,57 +1396,200 @@ def set_hepdata_table_index(self, i, jetR, min_pt): index += 48 if np.isclose(min_pt, 80.): index += 56 - + + # Jet angularities in Pb-Pb and mass in pp and Pb-Pb + elif self.observable == 'ang' and not self.is_pp: + #if np.isclose(min_pt, 40.): + # index += 0 + if np.isclose(min_pt, 60.): + index += 8 + elif np.isclose(min_pt, 80.): + index += 16 + elif np.isclose(min_pt, 100.): + index += 24 + elif self.observable == "mass": + if self.is_pp: + if np.isclose(min_pt, 40.): + index += 32 + elif np.isclose(min_pt, 60.): + index += 34 + elif np.isclose(min_pt, 80.): + index += 36 + else: # Pb-Pb + if np.isclose(min_pt, 40.): + index += 38 + if np.isclose(min_pt, 60.): + index += 40 + elif np.isclose(min_pt, 80.): + index += 42 + elif np.isclose(min_pt, 100.): + index += 44 + return index #---------------------------------------------------------------------- def set_hepdata_table_descriptors(self, table, jetR, obs_label, obs_setting, grooming_setting, min_pt, max_pt): - - if self.observable == 'ang': - + + if self.observable == 'mass': + if grooming_setting and 'sd' in grooming_setting.keys(): - - if np.isclose(jetR, 0.4): - table.location = 'Figure 3.' - elif np.isclose(jetR, 0.2): - table.location = 'Figure 4.' - + if np.isclose(min_pt, 40.): + table.location = "Figure 1, right (letter), Figure 18, bottom (public note)." + if np.isclose(min_pt, 60.): + table.location = "Figure 19, bottom (public note)." + if np.isclose(min_pt, 80.): + table.location = "Figure 20, bottom (public note)." 
+ if np.isclose(min_pt, 100.): + table.location = "Figure 21, right (public note)." + + table.description = r'Groomed jet invariant mass $m_{\mathrm{jet},g}$ in ' + (r"pp" if self.is_pp else r"Pb--Pb") + r" data." + table.description += '\n' + table.description += r'${} value2: - avg = value1 - else: # value2 > value1: - avg = value2 + if takeMaxDev: + for i in range(1, h_avg.GetNbinsX()+1): + max_val = -1 + for h in h_list: + val = h.GetBinContent(i) + if val > max_val: + max_val = val + h_avg.SetBinContent(i, max_val) - h_avg.SetBinContent(i, avg) + else: + for i in range(1, h_avg.GetNbinsX()+1): + sum = 0 + for h in h_list: + sum += h.GetBinContent(i) + avg = sum / len(h_list) + h_avg.SetBinContent(i, avg) return h_avg diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_performance.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_performance.yaml new file mode 100644 index 000000000..a20ac9a2b --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_performance.yaml @@ -0,0 +1,197 @@ +# Processing parameters +jetR: [0.2] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [40, 60, 80, 100, 150] + plot_overlay_list: + - ['config_R0.2_1', 
'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] + max_reg_param: 20 + reg_param_variation: 2 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1: + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.55, 0.75] + #obs_max_reported: [0.55] + obs_bins_det: [0, 0.08, 0.12, 0.16, 0.20, 0.24, 0.28, 0.32, 0.36, 0.42, 0.55, 0.75] + obs_bins_det_sys_binning: [0, 0.07, 0.11, 0.15, 0.19, 0.24, 0.29, 0.33, 0.37, 0.42, 0.54, 0.75] + reg_param: + 0.2: 15 + + config_R0.2_1.5: + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.05, 0.08, 0.1, 0.14, 0.18, 0.2, 0.24, 0.3, 0.5, 0.7] + #obs_max_reported: [0.5] + obs_bins_det: [0, 0.05, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.20, 0.22, 0.26, 0.3, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.045, 0.075, 0.09, 0.11, 0.13, 0.15, 0.17, 0.19, + 0.21, 0.25, 0.32, 0.5, 0.7] + reg_param: + 0.2: 15 + + config_R0.2_2: + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.02, 0.04, 0.07, 0.1, 0.13, 0.16, 0.2, 0.3, 0.40, 0.7] + #obs_max_reported: [0.3] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.18, 0.25, 0.40, 0.7] + obs_bins_det_sys_binning: [0, 0.015, 0.045, 0.055, 0.065, 0.075, 0.085, 0.11, 0.13, 0.15, + 0.17, 0.19, 0.21, 0.27, 0.40, 0.7] + reg_param: + 0.2: 15 + + config_R0.2_3: + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 
60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.08, 0.1, 0.15, 0.25, 0.35] + #obs_max_reported: [0.25] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.09, 0.11, 0.14, 0.18, 0.25, 0.35] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.03, 0.04, 0.045, 0.055, 0.065, + 0.095, 0.12, 0.15, 0.19, 0.25, 0.35] + reg_param: + 0.2: 15 + + ############################################################################ + # Different R & alpha configurations for SD + config_R0.2_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.55, 0.7] + #obs_max_reported: [0.55] + obs_bins_det: [0, 0.04, 0.08, 0.12, 0.15, 0.18, 0.21, 0.24, 0.27, 0.3, 0.38, 0.55, 0.7] + obs_bins_det_sys_binning: [0, 0.05, 0.09, 0.13, 0.16, 0.19, 0.22, 0.25, 0.28, 0.31, 0.38, 0.56, 0.7] + reg_param: + 0.2: 10 + + config_R0.2_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.02, 0.05, 0.08, 0.11, 0.15, 0.2, 0.3, 0.5, 0.7] + #obs_max_reported: [0.5] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.06, 0.08, 0.1, 0.15, 0.2, 0.3, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.04, 0.065, 0.085, 0.11, 0.16, 0.21, 0.31, 0.48, 0.7] + reg_param: + 0.2: 10 + + config_R0.2_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + 
obs_bins_truth: [0, 0.02, 0.04, 0.07, 0.1, 0.13, 0.16, 0.25, 0.45, 0.65] + #obs_max_reported: [0.25] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.05, 0.07, 0.1, 0.12, 0.15, 0.25, 0.4, 0.65] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.025, 0.035, 0.05, 0.075, 0.1, + 0.13, 0.16, 0.26, 0.45, 0.65] + reg_param: + 0.2: 10 + + config_R0.2_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.005, 0.01, 0.02, 0.03, 0.05, 0.06, 0.08, 0.1, 0.15, 0.3, 0.5] + #obs_max_reported: [0.15] + obs_bins_det: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.08, 0.1, 0.15, 0.3, 0.5] + obs_bins_det_sys_binning: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.06, 0.09, 0.11, 0.16, 0.32, 0.5] + #reg_param: + # 0.2: 10 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_performance" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: False +do_plot_final_result: False +do_plot_performance: True +figure_approval_status: 'Preliminary' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/AnalysisResultsFinal.root' + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters 
+prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin2.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin2.yaml new file mode 100644 index 000000000..250ed3521 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin2.yaml @@ -0,0 +1,229 @@ +# Processing parameters +jetR: [0.2] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [40, 60] + plot_overlay_list: + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] + max_reg_param: 20 + reg_param_variation: 2 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1: + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.55, 0.75] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.08, 0.12, 0.16, 0.20, 0.24, 0.28, 
0.32, 0.36, 0.42, 0.55, 0.75] + obs_bins_det_sys_binning: [0, 0.07, 0.11, 0.15, 0.19, 0.24, 0.29, 0.33, 0.37, 0.42, 0.54, 0.75] + reg_param: + 0.2: 15 + + config_R0.2_1.5: + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.05, 0.08, 0.1, 0.14, 0.18, 0.2, 0.24, 0.3, 0.5, 0.7] + obs_max_reported: [0.5] + obs_bins_det: [0, 0.05, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.20, 0.22, 0.26, 0.3, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.045, 0.075, 0.09, 0.11, 0.13, 0.15, 0.17, 0.19, + 0.21, 0.25, 0.32, 0.5, 0.7] + reg_param: + 0.2: 15 + + config_R0.2_2: + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.02, 0.04, 0.07, 0.1, 0.13, 0.16, 0.2, 0.3, 0.40, 0.7] + obs_max_reported: [0.3] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.18, 0.25, 0.40, 0.7] + obs_bins_det_sys_binning: [0, 0.015, 0.045, 0.055, 0.065, 0.075, 0.085, 0.11, 0.13, 0.15, + 0.17, 0.19, 0.21, 0.27, 0.40, 0.7] + reg_param: + 0.2: 15 + + config_R0.2_3: + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.08, 0.1, 0.15, 0.25, 0.35] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.09, 0.11, 0.14, 0.18, 0.25, 0.35] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.03, 0.04, 0.045, 0.055, 0.065, + 0.095, 0.12, 0.15, 0.19, 0.25, 0.35] + reg_param: + 0.2: 15 + + ############################################################################ + # Different R & alpha configurations for SD + config_R0.2_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + 
R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.55, 0.7] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.04, 0.08, 0.12, 0.15, 0.18, 0.21, 0.24, 0.27, 0.3, 0.38, 0.55, 0.7] + obs_bins_det_sys_binning: [0, 0.05, 0.09, 0.13, 0.16, 0.19, 0.22, 0.25, 0.28, 0.31, 0.38, 0.56, 0.7] + reg_param: + 0.2: 10 + + config_R0.2_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.02, 0.05, 0.08, 0.11, 0.15, 0.2, 0.3, 0.5, 0.7] + obs_max_reported: [0.5] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.06, 0.08, 0.1, 0.15, 0.2, 0.3, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.04, 0.065, 0.085, 0.11, 0.16, 0.21, 0.31, 0.48, 0.7] + reg_param: + 0.2: 10 + + config_R0.2_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.02, 0.04, 0.07, 0.1, 0.13, 0.16, 0.25, 0.45, 0.65] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.05, 0.07, 0.1, 0.12, 0.15, 0.25, 0.4, 0.65] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.025, 0.035, 0.05, 0.075, 0.1, + 0.13, 0.16, 0.26, 0.45, 0.65] + reg_param: + 0.2: 10 + + config_R0.2_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.005, 0.01, 0.02, 0.03, 0.05, 0.06, 0.08, 0.1, 0.15, 0.3, 0.5] + obs_max_reported: [0.15] + 
obs_bins_det: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.08, 0.1, 0.15, 0.3, 0.5] + obs_bins_det_sys_binning: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.06, 0.09, 0.11, 0.16, 0.32, 0.5] + reg_param: + 0.2: 10 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin2" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin2/ang/final_results/fFinalResults.root" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL 
(recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/987484/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/934787/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' + +# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/jetscape_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", + "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin2_girth_comp.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin2_girth_comp.yaml new file mode 100644 index 000000000..0245099e6 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin2_girth_comp.yaml @@ -0,0 +1,111 @@ +# Processing parameters +jetR: [0.2] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + 
max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [40, 60] + plot_overlay_list: + - ['config_R0.2_1'] + max_reg_param: 20 + reg_param_variation: 2 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1: + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.1, 0.15, 0.20, 0.25, 0.30, 0.35, 0.4, 0.6, 0.75] + obs_max_reported: [0.6] + obs_bins_det: [0, 0.08, 0.12, 0.16, 0.20, 0.24, 0.28, 0.32, 0.36, 0.42, 0.55, 0.75] + obs_bins_det_sys_binning: [0, 0.07, 0.11, 0.15, 0.19, 0.24, 0.29, 0.33, 0.37, 0.42, 0.54, 0.75] + reg_param: + 0.2: 15 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin2_girth_comp/" +results_pp: "/rstorage/alice/AnalysisResults/ang/PbPb/PbPb_girth_2.76TeV_R02_40-60.root" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: True + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, 
fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/987484/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/934787/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin3.yaml new file mode 100644 index 000000000..84173d801 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin3.yaml @@ 
-0,0 +1,226 @@ +# Processing parameters +jetR: [0.2] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [60, 80] + plot_overlay_list: + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] + max_reg_param: 10 + reg_param_variation: 2 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1: + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + obs_bins_truth: [0, 0.1, 0.15, 0.2, 0.25, 0.3, 0.5, 0.75] + obs_max_reported: [0.5] + obs_bins_det: [0, 0.1, 0.15, 0.20, 0.25, 0.3, 0.5, 0.75] + obs_bins_det_sys_binning: [0, 0.09, 0.14, 0.19, 0.24, 0.32, 0.5, 0.75] + reg_param: + 0.2: 5 + + config_R0.2_1.5: + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + obs_bins_truth: [0, 0.05, 0.08, 0.12, 0.17, 0.25, 0.4, 0.7] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.05, 
0.08, 0.12, 0.17, 0.25, 0.4, 0.7] + obs_bins_det_sys_binning: [0, 0.045, 0.075, 0.11, 0.16, 0.27, 0.4, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_2: + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + obs_bins_truth: [0, 0.02, 0.04, 0.07, 0.1, 0.13, 0.2, 0.3, 0.5, 0.7] + obs_max_reported: [0.3] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.12, 0.18, 0.3, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.025, 0.045, 0.065, 0.085, 0.13, 0.15, 0.21, 0.3, 0.5, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_3: + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + obs_bins_truth: [0, 0.01, 0.02, 0.04, 0.06, 0.08, 0.12, 0.25, 0.35] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.06, 0.08, 0.12, 0.25, 0.35] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.045, 0.065, 0.09, 0.13, 0.25, 0.35] + reg_param: + 0.2: 5 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + obs_bins_truth: [0, 0.04, 0.08, 0.15, 0.2, 0.25, 0.3, 0.4, 0.55, 0.7] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.08, 0.12, 0.16, 0.2, 0.25, 0.3, 0.4, 0.55, 0.7] + obs_bins_det_sys_binning: [0, 0.015, 0.035, 0.055, 0.075, 0.11, 0.15, 0.2, 0.26, 0.31, 0.41, 0.54, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + 
obs_bins_truth: [0, 0.015, 0.03, 0.06, 0.1, 0.15, 0.25, 0.4, 0.6] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.015, 0.03, 0.045, 0.06, 0.08, 0.1, 0.15, 0.25, 0.4, 0.6] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.04, 0.06, 0.08, 0.11, 0.16, 0.26, 0.42, 0.6] + reg_param: + 0.2: 5 + + config_R0.2_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + obs_bins_truth: [0, 0.005, 0.01, 0.02, 0.03, 0.05, 0.07, 0.1, 0.2, 0.35, 0.55] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1, 0.2, 0.35, 0.55] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.025, 0.035, 0.045, 0.055, 0.075, 0.11, 0.21, 0.37, 0.55] + reg_param: + 0.2: 5 + + config_R0.2_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 150] + obs_bins_truth: [0, 0.005, 0.01, 0.02, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.45] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1, 0.15, 0.25, 0.45] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.025, 0.035, 0.045, 0.055, 0.075, 0.11, 0.16, 0.26, 0.45] + reg_param: + 0.2: 5 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin3" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin3/ang/final_results/fFinalResults.root" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or 
not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/987484/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/934787/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' + +# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + 
'/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/jetscape_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", + "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin3_pTcut.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin3_pTcut.yaml new file mode 100644 index 000000000..f2c2a6ec7 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin3_pTcut.yaml @@ -0,0 +1,218 @@ +# Processing parameters +jetR: [0.2] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [60, 80] + plot_overlay_list: + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] + max_reg_param: 20 + reg_param_variation: 2 + + ############################################################################ + # Different R & 
alpha configurations + config_R0.2_1: + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.1, 0.15, 0.2, 0.25, 0.3, 0.5, 0.75] + obs_max_reported: [0.5] + obs_bins_det: [0, 0.1, 0.15, 0.20, 0.25, 0.3, 0.5, 0.75] + obs_bins_det_sys_binning: [0, 0.09, 0.14, 0.19, 0.24, 0.32, 0.5, 0.75] + reg_param: + 0.2: 5 + + config_R0.2_1.5: + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.05, 0.08, 0.12, 0.17, 0.25, 0.4, 0.7] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.05, 0.08, 0.12, 0.17, 0.25, 0.4, 0.7] + obs_bins_det_sys_binning: [0, 0.045, 0.075, 0.11, 0.16, 0.27, 0.4, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_2: + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.02, 0.04, 0.07, 0.1, 0.13, 0.2, 0.3, 0.5, 0.7] + obs_max_reported: [0.3] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.12, 0.18, 0.3, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.025, 0.045, 0.065, 0.085, 0.13, 0.15, 0.21, 0.3, 0.5, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_3: + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.01, 0.02, 0.04, 0.06, 0.08, 0.12, 0.25, 0.35] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.06, 0.08, 0.12, 0.25, 0.35] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.045, 0.065, 0.09, 0.13, 0.25, 0.35] + reg_param: + 0.2: 5 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 
60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.04, 0.08, 0.15, 0.2, 0.25, 0.3, 0.4, 0.55, 0.7] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.08, 0.12, 0.16, 0.2, 0.25, 0.3, 0.4, 0.55, 0.7] + obs_bins_det_sys_binning: [0, 0.015, 0.035, 0.055, 0.075, 0.11, 0.15, 0.2, 0.26, 0.31, 0.41, 0.54, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.015, 0.03, 0.06, 0.1, 0.15, 0.25, 0.4, 0.6] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.015, 0.03, 0.045, 0.06, 0.08, 0.1, 0.15, 0.25, 0.4, 0.6] + obs_bins_det_sys_binning: [0, 0.01, 0.025, 0.04, 0.06, 0.08, 0.11, 0.16, 0.26, 0.42, 0.6] + reg_param: + 0.2: 5 + + config_R0.2_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.005, 0.01, 0.02, 0.03, 0.05, 0.07, 0.1, 0.2, 0.35, 0.55] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1, 0.2, 0.35, 0.55] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.025, 0.035, 0.045, 0.055, 0.075, 0.11, 0.21, 0.37, 0.55] + reg_param: + 0.2: 5 + + config_R0.2_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 150] + obs_bins_truth: [0, 0.005, 0.01, 0.02, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.45] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1, 0.15, 0.25, 0.45] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.025, 0.035, 0.045, 0.055, 0.075, 0.11, 0.16, 0.26, 0.45] + reg_param: + 
0.2: 5 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin3_pTcut" +results_pp: "/rstorage/alice/AnalysisResults/ang/AngR02_ptbin3/ang/final_results/fFinalResults.root" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Internal' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/879564/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/987484/AnalysisResultsFinal.root', + 
'/rstorage/alice/AnalysisResults/ang/934787/AnalysisResultsFinal.root'] +# '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal_no1-5.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/AnalysisResultsFinal.root' + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin4.yaml new file mode 100644 index 000000000..07279797a --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin4.yaml @@ -0,0 +1,226 @@ +# Processing parameters +jetR: [0.2] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [80, 100] + plot_overlay_list: + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] + max_reg_param: 10 + reg_param_variation: 2 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1: + R: 0.2 + 
alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.1, 0.15, 0.2, 0.25, 0.3, 0.5, 0.75] + obs_max_reported: [0.5] + obs_bins_det: [0, 0.1, 0.15, 0.20, 0.25, 0.3, 0.5, 0.75] + obs_bins_det_sys_binning: [0, 0.09, 0.14, 0.19, 0.24, 0.32, 0.52, 0.75] + reg_param: + 0.2: 5 + + config_R0.2_1.5: + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.04, 0.08, 0.12, 0.17, 0.35, 0.6] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.04, 0.08, 0.12, 0.17, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.045, 0.075, 0.11, 0.16, 0.38, 0.6] + reg_param: + 0.2: 5 + + config_R0.2_2: + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.02, 0.04, 0.07, 0.1, 0.25, 0.5] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.02, 0.04, 0.07, 0.1, 0.25, 0.5] + obs_bins_det_sys_binning: [0, 0.025, 0.045, 0.085, 0.13, 0.27, 0.5] + reg_param: + 0.2: 5 + + config_R0.2_3: + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.01, 0.02, 0.04, 0.07, 0.1, 0.4] + obs_max_reported: [0.1] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.07, 0.1, 0.4] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.035, 0.075, 0.11, 0.4] + reg_param: + 0.2: 5 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 
200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.05, 0.1, 0.2, 0.3, 0.45, 0.7] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.05, 0.1, 0.2, 0.3, 0.45, 0.7] + obs_bins_det_sys_binning: [0, 0.04, 0.08, 0.17, 0.26, 0.47, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.02, 0.05, 0.1, 0.35, 0.6] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.02, 0.05, 0.1, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.025, 0.06, 0.11, 0.37, 0.6] + reg_param: + 0.2: 5 + + config_R0.2_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.005, 0.01, 0.03, 0.08, 0.25, 0.5] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.005, 0.01, 0.03, 0.08, 0.25, 0.5] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.035, 0.1, 0.28, 0.5] + reg_param: + 0.2: 5 + + config_R0.2_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 0.001, 0.005, 0.02, 0.05, 0.1, 0.3] + obs_max_reported: [0.1] + obs_bins_det: [0, 0.001, 0.005, 0.02, 0.05, 0.1, 0.3] + obs_bins_det_sys_binning: [0, 0.002, 0.007, 0.015, 0.055, 0.15, 0.3] + reg_param: + 0.2: 5 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin4" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin4/ang/final_results/fFinalResults.root" 
+roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/987484/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/934787/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' + 
+# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/jetscape_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", + "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin5.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin5.yaml new file mode 100644 index 000000000..d346dbaf3 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.2_ptbin5.yaml @@ -0,0 +1,225 @@ +# Processing parameters +jetR: [0.2] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [100, 150] + plot_overlay_list: + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 
'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] + max_reg_param: 10 + reg_param_variation: 2 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1: + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.07, 0.1, 0.12, 0.14, 0.17, 0.2, 0.24, 0.3, 0.5, 0.75] + obs_max_reported: [0.5] + obs_bins_det: [0, 0.07, 0.1, 0.12, 0.14, 0.17, 0.2, 0.24, 0.3, 0.5, 0.75] + obs_bins_det_sys_binning: [0, 0.05, 0.09, 0.11, 0.13, 0.15, 0.18, 0.21, 0.25, 0.3, 0.5, 0.75] + reg_param: + 0.2: 5 + + config_R0.2_1.5: + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.03, 0.06, 0.09, 0.12, 0.16, 0.2, 0.26, 0.4, 0.7] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.03, 0.06, 0.08, 0.1, 0.12, 0.16, 0.2, 0.26, 0.4, 0.7] + obs_bins_det_sys_binning: [0, 0.03, 0.05, 0.07, 0.09, 0.11, 0.13, 0.17, 0.22, 0.28, 0.45, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_2: + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1, 0.15, 0.3, 0.5] + obs_max_reported: [0.3] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5] + obs_bins_det_sys_binning: [0, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1, 0.15, 0.2, 0.3, 0.5] + reg_param: + 0.2: 5 + + config_R0.2_3: + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.01, 0.02, 
0.03, 0.04, 0.055, 0.075, 0.1, 0.2, 0.4] + obs_max_reported: [0.2] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.055, 0.075, 0.1, 0.2, 0.4] + obs_bins_det_sys_binning: [0, 0.006, 0.01, 0.015, 0.02, 0.03, 0.04, 0.05, 0.08, 0.12, 0.2, 0.4] + reg_param: + 0.2: 5 + + ############################################################################ + # Different R & alpha configurations + config_R0.2_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.03, 0.05, 0.07, 0.09, 0.11, 0.15, 0.2, 0.3, 0.4, 0.55, 0.7] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.03, 0.05, 0.07, 0.09, 0.11, 0.14, 0.17, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7] + obs_bins_det_sys_binning: [0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.13, 0.16, 0.18, 0.28, 0.3, 0.4, 0.55, 0.7] + reg_param: + 0.2: 5 + + config_R0.2_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.007, 0.01, 0.015, 0.02, 0.04, 0.07, 0.1, 0.25, 0.45, 0.65] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.007, 0.01, 0.015, 0.02, 0.04, 0.07, 0.1, 0.17, 0.25, 0.45, 0.65] + obs_bins_det_sys_binning: [0, 0.008, 0.015, 0.025, 0.035, 0.055, 0.08, 0.11, 0.16, 0.25, 0.45, 0.65] + reg_param: + 0.2: 5 + + config_R0.2_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.002, 0.004, 0.007, 0.01, 0.02, 0.04, 0.1, 0.35, 0.5] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.002, 0.004, 0.007, 0.01, 0.02, 0.04, 0.07, 0.1, 0.2, 0.35, 0.5] + obs_bins_det_sys_binning: [0, 0.003, 0.005, 0.008, 0.015, 0.025, 
0.045, 0.075, 0.11, 0.21, 0.35, 0.5] + reg_param: + 0.2: 5 + + config_R0.2_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.2 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 50, 60, 70, 80, 100, 150, 200] + pt_bins_det_sys_truncation: [50, 60, 70, 80, 100, 150] + obs_bins_truth: [0, 0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.25, 0.35] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.001, 0.002, 0.005, 0.01, 0.02, 0.04, 0.1, 0.25, 0.35] + obs_bins_det_sys_binning: [0, 0.001, 0.003, 0.004, 0.008, 0.015, 0.025, 0.045, 0.075, 0.11, 0.26, 0.35] + reg_param: + 0.2: 5 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin5" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: 
'/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/987484/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/934787/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/Scaled_no_cuts/AnalysisResultsFinal_no1-5.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' + +# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/jetscape_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", + "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.4_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.4_ptbin3.yaml new file mode 100644 index 000000000..c7a91a39f --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.4_ptbin3.yaml @@ -0,0 +1,217 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some 
functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.25, 0.7] + main_R_max: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [40, 60, 80, 100, 150] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + #obs_max_reported: [0.55] + obs_bins_det: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + obs_bins_det_sys_binning: [0, 0.05, 0.1, 0.13, 0.17, 0.21, 0.3, 0.41, 0.6, 0.75] + #reg_param: + # 0.4: 3 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.03, 0.07, 0.12, 0.16, 0.2, 0.25, 0.45, 0.6] + #obs_max_reported: [0.45] + obs_bins_det: [0, 0.03, 0.05, 0.07, 0.12, 0.16, 0.20, 0.25, 0.45, 0.6] + obs_bins_det_sys_binning: [0, 0.03, 0.045, 0.075, 
0.09, 0.13, 0.17, 0.21, 0.26, 0.48, 0.6] + #reg_param: + # 0.4: 3 + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + #obs_max_reported: [0.35] + obs_bins_det: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + obs_bins_det_sys_binning: [0, 0.02, 0.055, 0.085, 0.13, 0.16, 0.22, 0.37, 0.45] + #reg_param: + # 0.4: 3 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + #obs_max_reported: [0.25] + obs_bins_det: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + obs_bins_det_sys_binning: [0, 0.01, 0.015, 0.02, 0.025, 0.045, 0.065, 0.12, 0.23, 0.35] + #reg_param: + # 0.4: 3 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.4, 0.6, 0.8] + #obs_max_reported: [0.6] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.17, 0.25, 0.4, 0.6, 0.8] + obs_bins_det_sys_binning: [0, 0.015, 0.03, 0.05, 0.1, 0.15, 0.25, 0.41, 0.58, 0.8] + #reg_param: + # 0.4: 3 + + config_R0.4_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + #obs_max_reported: [0.5] + obs_bins_det: [0, 0.007, 
0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.005, 0.01, 0.02, 0.03, 0.045, 0.11, 0.16, 0.27, 0.53, 0.7] + #reg_param: + # 0.4: 3 + + config_R0.4_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.085, 0.15, 0.35, 0.6] + #obs_max_reported: [0.35] + obs_bins_det: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.07, 0.1, 0.15, 0.2, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.003, 0.009, 0.015, 0.03, 0.05, 0.075, 0.1, 0.16, 0.21, 0.33, 0.6] + #reg_param: + # 0.4: 3 + + config_R0.4_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + #obs_max_reported: [0.25] + obs_bins_det: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + obs_bins_det_sys_binning: [0, 0.003, 0.015, 0.035, 0.07, 0.11, 0.24, 0.5] + #reg_param: + # 0.4: 3 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR04_ptbin3" +results_pp: "/rstorage/alice/AnalysisResults/ang/AngR04_ptbin3/ang/final_results/fFinalResults.root" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: False +do_plot_final_result: False +do_plot_performance: True +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, 
truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main +# - trkeff +# - prior1 +# - prior2 +# - truncation +# - binning +# - random_mass +# - fastsim_generator0 +# - fastsim_generator1 +# - fastsim_generator2 +# - fastsim_generator3 +# - subtraction1 +# - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/879564/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/987484/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/934787/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/AnalysisResultsFinal.root' + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.7 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.4_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.4_ptbin4.yaml new file mode 100644 index 000000000..8f0c54e85 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/angularity_R0.4_ptbin4.yaml @@ -0,0 +1,218 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some 
functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.25, 0.7] + main_R_max: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [80, 100] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.05, 0.1, 0.2, 0.3, 0.55, 0.8] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.06, 0.09, 0.12, 0.2, 0.3, 0.4, 0.55, 0.8] + obs_bins_det_sys_binning: [0, 0.05, 0.1, 0.13, 0.21, 0.3, 0.41, 0.6, 0.75] + #reg_param: + # 0.4: 2 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.03, 0.06, 0.1, 0.2, 0.45, 0.7] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.03, 0.06, 0.1, 0.2, 0.45, 0.7] + obs_bins_det_sys_binning: [0, 0.035, 0.065, 0.09, 0.21, 0.48, 0.6] + #reg_param: + # 0.4: 2 + + config_R0.4_2: + R: 
0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.02, 0.05, 0.12, 0.4, 0.65] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.02, 0.05, 0.12, 0.4, 0.65] + obs_bins_det_sys_binning: [0, 0.02, 0.055, 0.085, 0.13, 0.37, 0.45] + #reg_param: + # 0.4: 2 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.01, 0.02, 0.04, 0.08, 0.3, 0.5] + obs_max_reported: [0.3] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.08, 0.3, 0.5] + obs_bins_det_sys_binning: [0, 0.005, 0.025, 0.045, 0.1, 0.33, 0.35] + #reg_param: + # 0.4: 2 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.02, 0.05, 0.12, 0.25, 0.55, 0.8] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.02, 0.05, 0.12, 0.25, 0.55, 0.8] + obs_bins_det_sys_binning: [0, 0.015, 0.045, 0.13, 0.27, 0.58, 0.8] + #reg_param: + # 0.4: 2 + + config_R0.4_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.005, 0.015, 0.05, 0.17, 0.45, 0.7] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.005, 0.015, 0.05, 0.17, 0.45, 0.7] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.045, 0.06, 0.155, 0.47, 0.7] + #reg_param: + # 0.4: 2 + + config_R0.4_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 
150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.002, 0.008, 0.05, 0.14, 0.35, 0.6] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.002, 0.008, 0.05, 0.14, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.001, 0.007, 0.06, 0.15, 0.33, 0.6] + #reg_param: + # 0.4: 2 + + config_R0.4_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.002, 0.007, 0.03, 0.07, 0.2, 0.5] + obs_max_reported: [0.2] + obs_bins_det: [0, 0.002, 0.007, 0.03, 0.07, 0.2, 0.5] + obs_bins_det_sys_binning: [0, 0.001, 0.008, 0.035, 0.07, 0.11, 0.24, 0.5] + #reg_param: + # 0.4: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR04_ptbin4" +results_pp: "/rstorage/alice/AnalysisResults/ang/AngR04_ptbin4/ang/final_results/fFinalResults.root" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 + - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/872257/AnalysisResultsFinal.root' 
+main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/877553/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/879564/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/890497/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/879602/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/934788/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/987484/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/934787/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/950196/AnalysisResultsFinal.root' + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.7 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_performance.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_performance.yaml new file mode 100644 index 000000000..31b562fbb --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_performance.yaml @@ -0,0 +1,174 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.25, 0.7] + main_R_max: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + 
+############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [40, 60, 80, 100, 150] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 20 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + obs_bins_det: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.03, 0.07, 0.12, 0.16, 0.2, 0.25, 0.45, 0.6] + obs_bins_det: [0, 0.03, 0.05, 0.07, 0.12, 0.16, 0.20, 0.25, 0.45, 0.6] + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + obs_bins_det: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + obs_bins_det: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + + ############################################################################ + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 
+ R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.4, 0.6, 0.8] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.17, 0.25, 0.4, 0.6, 0.8] + + config_R0.4_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + obs_bins_det: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + + config_R0.4_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.085, 0.15, 0.35, 0.6] + obs_bins_det: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.07, 0.1, 0.15, 0.2, 0.35, 0.6] + + config_R0.4_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + obs_bins_truth: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + obs_bins_det: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/pTcut/AngR04_performance" +#results_pp: "/rstorage/alice/AnalysisResults/ang/AngR04_ptbin3/ang/final_results/fFinalResults.root" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: False +do_plot_final_result: False +do_plot_performance: True +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# 
Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main +# - trkeff +# - prior1 +# - prior2 +# - truncation +# - binning +# - random_mass +# - fastsim_generator0 +# - fastsim_generator1 +# - fastsim_generator2 +# - subtraction1 +# - subtraction2 +# - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1039270/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1064582/Scaled_no_cuts/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1070709/Scaled_no_cuts/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1064588/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1064589/Scaled_no_cuts/AnalysisResultsFinal.root' +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1064587/Scaled_no_cuts/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1064590/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1064592/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1064591/Scaled_no_cuts/AnalysisResultsFinal.root'] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.7 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_ptbin3.yaml new file mode 100644 index 000000000..c96d0472f --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_ptbin3.yaml @@ -0,0 +1,223 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output 
in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.25, 0.7] + main_R_max: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + #pt_bins_reported: [40, 60, 80, 100, 150] + pt_bins_reported: [60, 80] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + obs_bins_det_sys_binning: [0, 0.05, 0.1, 0.13, 0.17, 0.21, 0.3, 0.41, 0.6, 0.75] + #reg_param: + # 0.4: 3 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.03, 0.07, 0.12, 0.16, 0.2, 0.25, 0.45, 0.6] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.03, 0.05, 0.07, 0.12, 0.16, 0.20, 0.25, 0.45, 0.6] + 
obs_bins_det_sys_binning: [0, 0.03, 0.045, 0.075, 0.09, 0.13, 0.17, 0.21, 0.26, 0.48, 0.6] + #reg_param: + # 0.4: 3 + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + obs_bins_det_sys_binning: [0, 0.02, 0.055, 0.085, 0.13, 0.16, 0.22, 0.37, 0.45] + #reg_param: + # 0.4: 3 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + obs_bins_det_sys_binning: [0, 0.01, 0.015, 0.02, 0.025, 0.045, 0.065, 0.12, 0.23, 0.35] + #reg_param: + # 0.4: 3 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.4, 0.6, 0.8] + obs_max_reported: [0.6] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.17, 0.25, 0.4, 0.6, 0.8] + obs_bins_det_sys_binning: [0, 0.015, 0.03, 0.05, 0.1, 0.15, 0.25, 0.41, 0.58, 0.8] + #reg_param: + # 0.4: 3 + + config_R0.4_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + 
obs_max_reported: [0.5] + obs_bins_det: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.005, 0.01, 0.02, 0.03, 0.045, 0.11, 0.16, 0.27, 0.53, 0.7] + #reg_param: + # 0.4: 3 + + config_R0.4_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.085, 0.15, 0.35, 0.6] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.07, 0.1, 0.15, 0.2, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.003, 0.009, 0.015, 0.03, 0.05, 0.075, 0.1, 0.16, 0.21, 0.33, 0.6] + #reg_param: + # 0.4: 3 + + config_R0.4_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [65, 80, 100, 120, 150] + obs_bins_truth: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + obs_bins_det_sys_binning: [0, 0.003, 0.015, 0.035, 0.07, 0.11, 0.24, 0.5] + #reg_param: + # 0.4: 3 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/pTcut/AngR04_ptbin3" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/pTcut/AngR04_ptbin3/ang/final_results/fFinalResults.root" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: True +force_rebin: True +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics 
to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1039270/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1064582/Scaled_no_cuts/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1070709/Scaled_no_cuts/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1064588/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1064589/Scaled_no_cuts/AnalysisResultsFinal.root' +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1064587/Scaled_no_cuts/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1064590/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1064592/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1064591/Scaled_no_cuts/AnalysisResultsFinal.root'] + +# Histograms for theory predictions +#theory_predictions: ['/rstorage/alice/AnalysisResults/ang/998707/AnalysisResultsFinal.root', +# '/rstorage/alice/AnalysisResults/ang/1007194/AnalysisResultsFinal_no1-4.root', +# '/rstorage/alice/AnalysisResults/ang/1003267/AnalysisResultsFinal.root', +# '/rstorage/alice/AnalysisResults/ang/PbPb/jetscape_results.root'] +#theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", +# "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.7 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 
+prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_ptbin4.yaml new file mode 100644 index 000000000..66696ae77 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/angularity_R0.4_ptbin4.yaml @@ -0,0 +1,222 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.25, 0.7] + main_R_max: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [80, 100] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.05, 0.1, 0.2, 0.3, 0.55, 0.8] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.06, 0.09, 0.12, 0.2, 0.3, 0.4, 0.55, 0.8] + obs_bins_det_sys_binning: [0, 0.05, 0.1, 
0.13, 0.21, 0.3, 0.41, 0.6, 0.75] + #reg_param: + # 0.4: 2 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.03, 0.06, 0.1, 0.2, 0.45, 0.7] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.03, 0.06, 0.1, 0.2, 0.45, 0.7] + obs_bins_det_sys_binning: [0, 0.035, 0.065, 0.09, 0.21, 0.48, 0.6] + #reg_param: + # 0.4: 2 + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.02, 0.05, 0.12, 0.4, 0.65] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.02, 0.05, 0.12, 0.4, 0.65] + obs_bins_det_sys_binning: [0, 0.02, 0.055, 0.085, 0.13, 0.37, 0.45] + #reg_param: + # 0.4: 2 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.01, 0.02, 0.04, 0.08, 0.3, 0.5] + obs_max_reported: [0.3] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.08, 0.3, 0.5] + obs_bins_det_sys_binning: [0, 0.005, 0.025, 0.045, 0.1, 0.33, 0.35] + #reg_param: + # 0.4: 2 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.02, 0.05, 0.12, 0.25, 0.55, 0.8] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.02, 0.05, 0.12, 0.25, 0.55, 0.8] + obs_bins_det_sys_binning: [0, 0.015, 0.045, 0.13, 0.27, 0.58, 0.8] + #reg_param: + # 0.4: 2 + + config_R0.4_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 
200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.005, 0.015, 0.05, 0.17, 0.45, 0.7] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.005, 0.015, 0.05, 0.17, 0.45, 0.7] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.045, 0.06, 0.155, 0.47, 0.7] + #reg_param: + # 0.4: 2 + + config_R0.4_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.002, 0.008, 0.05, 0.14, 0.35, 0.6] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.002, 0.008, 0.05, 0.14, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.001, 0.007, 0.06, 0.15, 0.33, 0.6] + #reg_param: + # 0.4: 2 + + config_R0.4_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [85, 100, 120, 150] + obs_bins_truth: [0, 0.002, 0.007, 0.03, 0.07, 0.2, 0.5] + obs_max_reported: [0.2] + obs_bins_det: [0, 0.002, 0.007, 0.03, 0.07, 0.2, 0.5] + obs_bins_det_sys_binning: [0, 0.001, 0.008, 0.035, 0.07, 0.11, 0.24, 0.5] + #reg_param: + # 0.4: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/pTcut/AngR04_ptbin4" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/pTcut/AngR04_ptbin4/ang/final_results/fFinalResults.root" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# 
Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 + - subtraction1 + - subtraction2 + - thermal_closure + +main_data: '/rstorage/alice/AnalysisResults/ang/1039270/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1064582/Scaled_no_cuts/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1070709/Scaled_no_cuts/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1064588/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1064589/Scaled_no_cuts/AnalysisResultsFinal.root' +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1064587/Scaled_no_cuts/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1064590/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1064592/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1064591/Scaled_no_cuts/AnalysisResultsFinal.root'] + +# Histograms for theory predictions +#theory_predictions: ['/rstorage/alice/AnalysisResults/ang/998707/AnalysisResultsFinal.root', +# '/rstorage/alice/AnalysisResults/ang/1007194/AnalysisResultsFinal_no1-4.root', +# '/rstorage/alice/AnalysisResults/ang/1003267/AnalysisResultsFinal.root', +# '/rstorage/alice/AnalysisResults/ang/PbPb/jetscape_results.root'] +#theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", +# "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.7 + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git 
a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_pTcut.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_pTcut.yaml new file mode 100644 index 000000000..5a2507a40 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_pTcut.yaml @@ -0,0 +1,104 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5 +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.05, 0.1, 0.5] + 0.4: [0.05, 0.25, 0.7] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + 
config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_randmass_pTcut.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_randmass_pTcut.yaml new file mode 100644 index 000000000..969a9e466 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_randmass_pTcut.yaml @@ -0,0 +1,109 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5 +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Mass assumption for track/jet reconstruction +track_mass: 0.13957 # Pion mass in GeV/c^2 +track_random_mass: True # Whether to randomly assign K and p mass to some tracks + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + 
max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 + diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_fastsim_pTcut.yaml
b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_fastsim_pTcut.yaml new file mode 100644 index 000000000..0ee1bbb10 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_fastsim_pTcut.yaml @@ -0,0 +1,106 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5 +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.05, 0.1, 0.5] + 0.4: [0.05, 0.25, 0.7] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + # Also create histograms without subtraction, similar to pp case + include_no_subtraction: True + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: True +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + 
SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_pTcut.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_pTcut.yaml new file mode 100644 index 000000000..f910e7b4f --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_pTcut.yaml @@ -0,0 +1,106 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5 +# Uncomment for data // comment for MC +reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.05, 0.1, 0.5] + 0.4: [0.05, 0.25, 0.7] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + # Also create histograms without subtraction, similar to pp case + include_no_subtraction: True + +# Parameters needed for MC processing in Pb-Pb +#dry_run: False +#fast_simulation: 
False +#emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +#mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +#reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_randmass_pTcut.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_randmass_pTcut.yaml new file mode 100644 index 000000000..42667eda8 --- /dev/null +++
b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_randmass_pTcut.yaml @@ -0,0 +1,107 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5 +# Uncomment for data // comment for MC +reject_tracks_fraction: 0 + +# Mass assumption for track/jet reconstruction +track_mass: 0.13957 # Pion mass in GeV/c^2 +track_random_mass: True # Whether to randomly assign K and p mass to some tracks + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +#dry_run: False +#fast_simulation: False +#emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +#mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +#reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + +
config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_thermal_closure_pTcut.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_thermal_closure_pTcut.yaml new file mode 100644 index 000000000..0167ed34c --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_thermal_closure_pTcut.yaml @@ -0,0 +1,112 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5 +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 0.02 + +# Parameters for thermal model
+thermal_model: + beta: 0.425 + N_avg: 2500 + sigma_N: 500 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 + + diff --git a/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_treff_pTcut.yaml b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_treff_pTcut.yaml new file mode 100644 index 000000000..077c68c7c --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_treff_pTcut.yaml @@ -0,0 +1,104 @@ +# Processing parameters 
+jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5 +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 0.06 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + +
############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_performance.yaml b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_performance.yaml new file mode 100644 index 000000000..f0ae195e1 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_performance.yaml @@ -0,0 +1,136 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [40, 60, 80, 100, 150] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 20 + reg_param_variation: 2 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.07, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + 
#obs_max_reported: [7] + obs_bins_det: [0, 0.07, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + obs_bins_det_sys_binning: [0, 0.5, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.2, 4.6, 5.2, + 5.8, 6.8, 8, 9, 12] + #reg_param: + # 0.2: 15 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + #obs_max_reported: [7] + obs_bins_det: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + obs_bins_det_sys_binning: [0, 0.4, 0.7, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.4, 5, 6, 7, 10] + SoftDrop: + zcut: 0.2 + beta: 0 + #reg_param: + # 0.2: 15 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_performance" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin2/mass/final_results/fFinalResults.root" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: False +do_plot_final_result: False +do_plot_performance: True +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - 
fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1021096/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1021095/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1028380/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1028397/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1028398/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/1028400/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1028399/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1021094/AnalysisResultsFinal.root' + +# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", "JEWEL pp"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin2.yaml b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin2.yaml new file mode 100644 index 000000000..0f02fe226 --- /dev/null +++ 
b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin2.yaml @@ -0,0 +1,137 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [40, 60] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 15 + reg_param_variation: 1 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + obs_max_reported: [7] + obs_bins_det: [0, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + obs_bins_det_sys_binning: [0, 0.5, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.2, 4.6, 5.2, + 5.8, 6.8, 8, 9, 12] + reg_param: + 0.2: 2 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + 
obs_bins_truth: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + obs_max_reported: [7] + obs_bins_det: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + obs_bins_det_sys_binning: [0, 0.4, 0.7, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.4, 5, 6, 7, 10] + SoftDrop: + zcut: 0.2 + beta: 0 + reg_param: + 0.2: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin2" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin2/mass/final_results/fFinalResults.root" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1021096/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1028380/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1028397/AnalysisResultsFinal_no1-4.root' +# fastsim 
order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1028398/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/1028400/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1028399/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1021094/AnalysisResultsFinal.root' + +# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/Jetscape_ALICE_JetMass/JETSCAPE_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", "JEWEL pp", "JETSCAPE (MATTER+LBT)"] +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 + diff --git a/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin3.yaml new file mode 100644 index 000000000..37d56ac44 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin3.yaml @@ -0,0 +1,137 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) 
+eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [60, 80] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_max_reported: [10] + obs_bins_det: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_bins_det_sys_binning: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + reg_param: + 0.2: 2 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_max_reported: [10] + obs_bins_det: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_bins_det_sys_binning: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + SoftDrop: + zcut: 0.2 + beta: 0 + reg_param: + 0.2: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin3" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin3/mass/final_results/fFinalResults.root" +roounfold_path: 
"$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1021096/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +#trkeff_response: '/rstorage/alice/AnalysisResults/ang/1021095/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1028380/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1028397/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1028398/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/1028400/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1028399/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1021094/AnalysisResultsFinal.root' + +# Histograms for theory predictions +theory_predictions: 
['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/Jetscape_ALICE_JetMass/JETSCAPE_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin4.yaml new file mode 100644 index 000000000..4c5036d2d --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin4.yaml @@ -0,0 +1,136 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [80, 100] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different alpha 
configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 2, 4, 6, 8, 12, 16] + obs_max_reported: [12] + obs_bins_det: [0, 2, 4, 6, 8, 12, 16] + obs_bins_det_sys_binning: [0, 1.8, 3.6, 4.4, 5.2, 7, 9, 11, 13, 16, 20] + reg_param: + 0.2: 2 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 1.8, 3.6, 5.8, 8, 12, 16] + obs_max_reported: [12] + obs_bins_det: [0, 1.8, 3.6, 5.8, 8, 12, 16] + obs_bins_det_sys_binning: [0, 1.6, 3.2, 5.6, 7.6, 9, 12, 16, 20] + SoftDrop: + zcut: 0.2 + beta: 0 + reg_param: + 0.2: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin4" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin4/mass/final_results/fFinalResults.root" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - 
fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1021096/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1028380/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1028397/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1028398/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/1028400/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1028399/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1021094/AnalysisResultsFinal.root' + +# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/Jetscape_ALICE_JetMass/JETSCAPE_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin5.yaml 
b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin5.yaml new file mode 100644 index 000000000..164fc7a95 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.2_ptbin5.yaml @@ -0,0 +1,138 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.1, 0.5] + main_R_max: 0.1 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [100, 150] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 16, 20] + obs_max_reported: [16] + obs_bins_det: [0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 16, 20] + obs_bins_det_sys_binning: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 22, 25] + reg_param: + 0.2: 2 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 
60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 80, 100, 120, 150] + obs_bins_truth: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 16, 20] + obs_max_reported: [16] + obs_bins_det: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 16, 20] + obs_bins_det_sys_binning: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 22, 25] + SoftDrop: + zcut: 0.2 + beta: 0 + reg_param: + 0.2: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/PbPb/AngR02_ptbin5" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: '' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1021096/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +#trkeff_response: '/rstorage/alice/AnalysisResults/ang/1021095/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1150066/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1028380/AnalysisResultsFinal.root' +randmass_response: 
'/rstorage/alice/AnalysisResults/ang/1028397/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1028398/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/1028400/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1028399/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1021094/AnalysisResultsFinal.root' + +# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/1216098/Scaled_no_cuts/AnalysisResultsFinal_no1-6.root', + '/rstorage/alice/AnalysisResults/ang/1216355/Scaled_no_cuts/AnalysisResultsFinal_no1-8.root', + '/rstorage/alice/AnalysisResults/ang/1216018/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/Jetscape_ALICE_JetMass/JETSCAPE_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", "JEWEL pp", "JETSCAPE (MATTER+LBT)"] + +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.5 + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.4_performance.yaml b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.4_performance.yaml new file mode 100644 index 000000000..7539ea9a9 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/mass_R0.4_performance.yaml @@ -0,0 +1,131 @@ +# Processing parameters +jetR: [0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: [0.05, 0.25, 0.7] + main_R_max: 0.25 + alpha: 0 + max_eta: 
0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Max eta value (for plots) +eta_max: 0.9 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [40, 60, 80, 100, 150] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 20 + reg_param_variation: 2 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.4 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.07, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + obs_bins_det: [0, 0.07, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + obs_bins_det_sys_binning: [0, 0.5, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.2, 4.6, 5.2, + 5.8, 6.8, 8, 9, 12] + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.4 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + obs_bins_det: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + obs_bins_det_sys_binning: [0, 0.4, 0.7, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.4, 5, 6, 7, 10] + SoftDrop: + zcut: 0.2 + beta: 0 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: 
"/rstorage/alice/AnalysisResults/ang/PbPb/AngR04_performance" +results_pp: "/rstorage/alice/AnalysisResults/ang/pp/AngR04_ptbin2/mass/final_results/fFinalResults.root" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: False +do_plot_final_result: False +do_plot_performance: True +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + - fastsim_generator2 +# - fastsim_generator3 + - subtraction1 + - subtraction2 + - thermal_closure + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1020932/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1021096/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1021095/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1028380/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1028397/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig, JEWEL (recoils off), JEWEL (recoils on) +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1028398/AnalysisResultsFinal_no1-5.root', + '/rstorage/alice/AnalysisResults/ang/1028400/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1028399/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/997583/AnalysisResultsFinal.root'] +thermal_closure: '/rstorage/alice/AnalysisResults/ang/1021094/AnalysisResultsFinal.root' + 
+# Histograms for theory predictions +theory_predictions: ['/rstorage/alice/AnalysisResults/ang/998707/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1007194/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1003267/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/PbPb/jetscape_results.root'] +theory_predictions_names: ["JEWEL (recoils off)", "JEWEL (recoils on)", + "JEWEL pp", "JETSCAPE (MATTER+LBT)"] +# R_max variations +R_max_variation1: 0.05 +R_max_variation2: 0.7 + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.2 +prior2_variation_parameter: -0.2 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb.yaml b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb.yaml new file mode 100644 index 000000000..80031940c --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb.yaml @@ -0,0 +1,104 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.05, 0.1, 0.5] + 0.4: [0.05, 0.25, 0.7] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + # Also create histograms without subtraction, similar to pp case + include_no_subtraction: True + +# Parameters needed for MC processing in Pb-Pb +#dry_run: False +#fast_simulation: False +#emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +#mc_fraction_threshold: 0.5 +# Uncommend for MC // comment for data +#reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding 
parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_embedding.yaml b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_embedding.yaml new file mode 100644 index 000000000..181428382 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_embedding.yaml @@ -0,0 +1,102 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC 
+#reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.05, 0.1, 0.5] + 0.4: [0.05, 0.25, 0.7] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + 
name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_embedding_randmass.yaml b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_embedding_randmass.yaml new file mode 100644 index 000000000..dddd5992b --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_embedding_randmass.yaml @@ -0,0 +1,107 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Mass assumption for track/jet reconstruction +track_mass: 0.13957 # Pion mass in GeV/c^2 +track_random_mass: True # Whether to randomly assign K and p mass to some tracks + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncommend for MC // comment for data +reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + 
############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 + diff --git a/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_fastsim.yaml b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_fastsim.yaml new file mode 100644 index 000000000..18ab2bf7a --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_fastsim.yaml @@ -0,0 +1,104 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.05, 0.1, 0.5] + 0.4: [0.05, 0.25, 0.7] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + # Also create histograms without subtraction, similar to pp case + include_no_subtraction: True + +# Parameters needed for MC processing in Pb-Pb +dry_run: False 
+fast_simulation: True +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_randmass.yaml b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_randmass.yaml new file mode 100644 index 000000000..dde796b6d --- /dev/null +++ 
b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_randmass.yaml @@ -0,0 +1,105 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +reject_tracks_fraction: 0 + +# Mass assumption for track/jet reconstruction +track_mass: 0.13957 # Pion mass in GeV/c^2 +track_random_mass: True # Whether to randomly assign K and p mass to some tracks + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +#dry_run: False +#fast_simulation: False +#emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +#mc_fraction_threshold: 0.5 +# Uncommend for MC // comment for data +#reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + 
beta: 0 + alpha: 3 + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_thermal_closure.yaml b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_thermal_closure.yaml new file mode 100644 index 000000000..c61aa2bf0 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_thermal_closure.yaml @@ -0,0 +1,108 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Background subtraction parameters for Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncommend for MC // comment for data +reject_tracks_fraction: 0.02 + +# Parameters for thermal model +thermal_model: + beta: 0.425 + N_avg: 2500 + sigma_N: 500 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + 
common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_treff.yaml b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_treff.yaml new file mode 100644 index 000000000..c60f339c2 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/process_angularity_PbPb_treff.yaml @@ -0,0 +1,102 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +#reject_tracks_fraction: 0 + +# Background subtraction parameters for 
Pb-Pb +constituent_subtractor: + max_distance: + 0.2: [0.1] + 0.4: [0.25] + main_R_max: + 0.2: 0.1 + 0.4: 0.25 + alpha: 0 + max_eta: 0.9 + bge_rho_grid_size: 1.0 + max_pt_correct: 100 + ghost_area: 0.01 + +# Parameters needed for MC processing in Pb-Pb +dry_run: False +fast_simulation: False +emb_file_list: '/rstorage/alice/data/LHC18qr/570/files.txt' +mc_fraction_threshold: 0.5 +# Uncomment for MC // comment for data +reject_tracks_fraction: 'LHC18qr' + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git 
a/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_norecoil_nosubtraction.yaml b/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_norecoil_nosubtraction.yaml new file mode 100644 index 000000000..c12377f5d --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_norecoil_nosubtraction.yaml @@ -0,0 +1,75 @@ +# Processing parameters +process_observables: ['ang', 'mass'] +jetR: [0.2, 0.4] +recoils_off: True + +# For extra verbose output in some functions +debug_level: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Jet mass both with and without Soft Drop grooming + config_m: + name: "mass" + + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_yesrecoil_nosubtraction.yaml 
b/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_yesrecoil_nosubtraction.yaml new file mode 100644 index 000000000..fd107d24b --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_yesrecoil_nosubtraction.yaml @@ -0,0 +1,75 @@ +# Processing parameters +process_observables: ['ang', 'mass'] +jetR: [0.2, 0.4] +recoils_off: False + +# For extra verbose output in some functions +debug_level: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Jet mass both with and without Soft Drop grooming + config_m: + name: "mass" + + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_yesrecoil_yessubtraction_negative_recombiner.yaml 
b/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_yesrecoil_yessubtraction_negative_recombiner.yaml new file mode 100644 index 000000000..7d7611daf --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/PbPb/theory/JEWEL_yesrecoil_yessubtraction_negative_recombiner.yaml @@ -0,0 +1,77 @@ +# Processing parameters +process_observables: ['ang', 'mass'] +jetR: [0.2, 0.4] +recoils_off: False +thermal_subtraction_method: "negative_recombiner" +thermal_rejection_fraction: 0.3 + +# For extra verbose output in some functions +debug_level: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Jet mass both with and without Soft Drop grooming + config_m: + name: "mass" + + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git 
a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin1.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin1.yaml index 5d7ccce9e..5ef8f3b7d 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin1.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin1.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.2] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [20, 40] plot_overlay_list: - - ['config_R0.2_B1', 'config_R0.2_B1.5', 'config_R0.2_B2', 'config_R0.2_B3'] - - ['config_R0.2_B1_SD', 'config_R0.2_B1.5_SD', 'config_R0.2_B2_SD', 'config_R0.2_B3_SD'] + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.2_B1: + # Different R & alpha configurations + config_R0.2_1: R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -54,9 +54,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5: + config_R0.2_1.5: R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 
25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -70,9 +70,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2: + config_R0.2_2: R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -86,9 +86,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B3: + config_R0.2_3: R: 0.2 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -103,13 +103,13 @@ ang: 0.2: 3 ############################################################################ - # Different R & beta configurations with SoftDrop - config_R0.2_B1_SD: + # Different R & alpha configurations with SoftDrop + config_R0.2_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -123,12 +123,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5_SD: + config_R0.2_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -142,12 +142,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2_SD: + config_R0.2_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -161,12 +161,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B3_SD: + config_R0.2_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 
0.2 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -184,7 +184,7 @@ ang: ############################################################################### # Theory comparison parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 2, 3] theory_dir: "/home/ezra/theory_predictions/" pt_scale_factors_filename: "qg_fractions-ALICE-R02.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin2.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin2.yaml index 42378c224..35ae54dac 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin2.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin2.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.2] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. 
Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [40, 60] plot_overlay_list: - - ['config_R0.2_B1', 'config_R0.2_B1.5', 'config_R0.2_B2', 'config_R0.2_B3'] - - ['config_R0.2_B1_SD', 'config_R0.2_B1.5_SD', 'config_R0.2_B2_SD', 'config_R0.2_B3_SD'] + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.2_B1: + # Different R & alpha configurations + config_R0.2_1: R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -50,9 +50,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5: + config_R0.2_1.5: R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -64,9 +64,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2: + config_R0.2_2: R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -78,9 +78,9 @@ ang: reg_param: 
0.2: 3 - config_R0.2_B3: + config_R0.2_3: R: 0.2 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120] @@ -93,13 +93,13 @@ ang: 0.2: 3 ############################################################################ - # Different R & beta configurations for SD - config_R0.2_B1_SD: + # Different R & alpha configurations for SD + config_R0.2_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120] @@ -110,12 +110,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5_SD: + config_R0.2_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120] @@ -126,12 +126,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2_SD: + config_R0.2_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120] @@ -143,12 +143,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B3_SD: + config_R0.2_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120] @@ -163,7 +163,7 @@ ang: ############################################################################### # Theory comparison 
parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 2, 3] theory_dir: "/home/ezra/theory_predictions/" pt_scale_factors_filename: "qg_fractions-ALICE-R02.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin3.yaml index 3c5d91fbf..54ace0555 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin3.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin3.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.2] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [60, 80] plot_overlay_list: - - ['config_R0.2_B1', 'config_R0.2_B1.5', 'config_R0.2_B2', 'config_R0.2_B3'] - - ['config_R0.2_B1_SD', 'config_R0.2_B1.5_SD', 'config_R0.2_B2_SD', 'config_R0.2_B3_SD'] + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.2_B1: + # Different R & alpha configurations + config_R0.2_1: R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -50,9 +50,9 
@@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5: + config_R0.2_1.5: R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -63,9 +63,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2: + config_R0.2_2: R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -76,9 +76,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B3: + config_R0.2_3: R: 0.2 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -90,13 +90,13 @@ ang: 0.2: 3 ############################################################################ - # Different R & beta configurations - config_R0.2_B1_SD: + # Different R & alpha configurations + config_R0.2_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -107,12 +107,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5_SD: + config_R0.2_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -123,12 +123,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2_SD: + config_R0.2_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] 
pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -139,12 +139,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B3_SD: + config_R0.2_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -159,7 +159,7 @@ ang: ############################################################################### # Theory comparison parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 2, 3] theory_dir: "/home/ezra/theory_predictions/" pt_scale_factors_filename: "qg_fractions-ALICE-R02.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin4.yaml index 1151c84d5..d2edf8b61 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin4.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.2_ptbin4.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.2] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. 
Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [80, 100] plot_overlay_list: - - ['config_R0.2_B1', 'config_R0.2_B1.5', 'config_R0.2_B2', 'config_R0.2_B3'] - - ['config_R0.2_B1_SD', 'config_R0.2_B1.5_SD', 'config_R0.2_B2_SD', 'config_R0.2_B3_SD'] + - ['config_R0.2_1', 'config_R0.2_1.5', 'config_R0.2_2', 'config_R0.2_3'] + - ['config_R0.2_1_SD', 'config_R0.2_1.5_SD', 'config_R0.2_2_SD', 'config_R0.2_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.2_B1: + # Different R & alpha configurations + config_R0.2_1: R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -50,9 +50,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5: + config_R0.2_1.5: R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -63,9 +63,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2: + config_R0.2_2: R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -76,9 +76,9 @@ ang: reg_param: 0.2: 3 - config_R0.2_B3: + config_R0.2_3: R: 0.2 - beta: 3 + alpha: 3 
pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -90,13 +90,13 @@ ang: 0.2: 3 ############################################################################ - # Different R & beta configurations - config_R0.2_B1_SD: + # Different R & alpha configurations + config_R0.2_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -107,12 +107,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B1.5_SD: + config_R0.2_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -123,12 +123,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B2_SD: + config_R0.2_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -139,12 +139,12 @@ ang: reg_param: 0.2: 3 - config_R0.2_B3_SD: + config_R0.2_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.2 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -159,7 +159,7 @@ ang: ############################################################################### # Theory comparison parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 2, 3] theory_dir: "/home/ezra/theory_predictions/" pt_scale_factors_filename: 
"qg_fractions-ALICE-R02.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin0.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin0.yaml new file mode 100644 index 000000000..1964015c7 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin0.yaml @@ -0,0 +1,214 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Max eta value (for plots) +eta_max: 0.9 + +sd_zcut: 0.2 +sd_beta: 0 + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [10, 20, 40] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + # - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.08, 0.18, 0.28, 0.38, 0.52, 0.8] + obs_max_reported: [0.52, 0.52] + obs_bins_det: [0, 0.02, 0.05, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.24, + 0.26, 0.28, 0.3, 0.34, 0.38, 0.42, 0.46, 0.5, 0.6, 0.8] + obs_bins_det_sys_binning: [0.0, 0.025, 0.05, 0.075, 0.1, 0.12, 0.15, 0.18, 0.2, 0.22, + 0.25, 0.28, 0.3, 0.32, 
0.35, 0.4, 0.45, 0.55, 0.8] + #reg_param: + # 0.4: 5 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.04, 0.09, 0.15, 0.24, 0.35, 0.45, 0.7] + obs_max_reported: [0.35, 0.35] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.12, + 0.14, 0.16, 0.18, 0.20, 0.22, 0.24, 0.28, 0.32, 0.38, 0.45, 0.5, 0.75] + obs_bins_det_sys_binning: [0, 0.015, 0.03, 0.045, 0.06, 0.075, 0.09, 0.11, 0.13, 0.15, + 0.17, 0.19, 0.21, 0.23, 0.26, 0.29, 0.33, 0.37, 0.48, 0.75] + #reg_param: + # 0.4: 5 + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.025, 0.05, 0.09, 0.13, 0.18, 0.3, 0.35, 0.5, 0.7] + obs_max_reported: [0.3, 0.3] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, + 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.2, 0.22, 0.25, 0.3, 0.35, 0.4, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.015, 0.025, 0.035, 0.045, 0.055, 0.065, 0.075, 0.085, 0.1, + 0.11, 0.13, 0.16, 0.18, 0.22, 0.25, 0.32, 0.37, 0.42, 0.55, 0.7] + #reg_param: + # 0.4: 5 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.01, 0.03, 0.05, 0.075, 0.12, 0.19, 0.25, 0.35, 0.5] + obs_max_reported: [0.19, 0.19] + obs_bins_det: [0, 0.002, 0.005, 0.008, 0.01, 0.015, 0.02, 0.025, 0.03, 0.035, 0.04, 0.045, + 0.05, 0.055, 0.06, 0.065, 0.07, 0.075, 0.08, 0.09, 0.1, 0.12, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5] + 
obs_bins_det_sys_binning: [0, 0.003, 0.006, 0.009, 0.015, 0.025, 0.035, 0.045, 0.055, + 0.065, 0.075, 0.085, 0.095, 0.12, 0.16, 0.2, 0.26, 0.32, 0.42, 0.5] + #reg_param: + # 0.4: 5 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + #config_R0.4_1_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 1 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.4, 0.6, 0.8] + # obs_max_reported: [0.6] + # obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.17, 0.25, 0.4, 0.6, 0.8] + # obs_bins_det_sys_binning: [0, 0.015, 0.03, 0.05, 0.1, 0.15, 0.25, 0.41, 0.58, 0.8] + # #reg_param: + # # 0.4: 3 + + #config_R0.4_1.5_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 1.5 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + # obs_max_reported: [0.5] + # obs_bins_det: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + # obs_bins_det_sys_binning: [0, 0.005, 0.01, 0.02, 0.03, 0.045, 0.11, 0.16, 0.27, 0.53, 0.7] + # #reg_param: + # # 0.4: 3 + + #config_R0.4_2_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 2 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.085, 0.15, 0.35, 0.6] + # obs_max_reported: [0.35] + # obs_bins_det: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.07, 0.1, 0.15, 0.2, 
0.35, 0.6] + # obs_bins_det_sys_binning: [0, 0.003, 0.009, 0.015, 0.03, 0.05, 0.075, 0.1, 0.16, 0.21, 0.33, 0.6] + # #reg_param: + # # 0.4: 3 + + #config_R0.4_3_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 3 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + # obs_max_reported: [0.25] + # obs_bins_det: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + # obs_bins_det_sys_binning: [0, 0.003, 0.015, 0.035, 0.07, 0.11, 0.24, 0.5] + # #reg_param: + # # 0.4: 3 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/pp/AngR04_ptbin0" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 +# - fastsim_generator2 + +## Old files (used for inclusive jet paper) +main_data: '/rstorage/alice/AnalysisResults/ang/287220/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/351487/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/351489/AnalysisResultsFinal.root' +randmass_data: 
'/rstorage/alice/AnalysisResults/ang/287383/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/351488/AnalysisResultsFinal.root' +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/287385/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/287386/AnalysisResultsFinal.root'] + +## New files (with mass) +# Paths to processing output, to be used for unfolding +#main_data: '/rstorage/alice/AnalysisResults/ang/1110550/AnalysisResultsFinal.root' +#main_response: '/rstorage/alice/AnalysisResults/ang/1110651/Scaled_no_cuts/AnalysisResultsFinal.root' +#trkeff_response: '/rstorage/alice/AnalysisResults/ang/1110652/Scaled_no_cuts/AnalysisResultsFinal.root' +#randmass_data: '/rstorage/alice/AnalysisResults/ang/1110653/AnalysisResultsFinal.root' +#randmass_response: '/rstorage/alice/AnalysisResults/ang/1110654/Scaled_no_cuts/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig +#fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1110655/Scaled_no_cuts/AnalysisResultsFinal.root', +# '/rstorage/alice/AnalysisResults/ang/1110656/Scaled_no_cuts/AnalysisResultsFinal.root'] + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin1.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin1.yaml index 9f3429889..1a7743878 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin1.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin1.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.4] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. 
Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [20, 40] plot_overlay_list: - - ['config_R0.4_B1', 'config_R0.4_B1.5', 'config_R0.4_B2', 'config_R0.4_B3'] - - ['config_R0.4_B1_SD', 'config_R0.4_B1.5_SD', 'config_R0.4_B2_SD', 'config_R0.4_B3_SD'] + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.4_B1: + # Different R & alpha configurations + config_R0.4_1: R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -57,9 +57,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B1.5: + config_R0.4_1.5: R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -77,9 +77,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B2: + config_R0.4_2: R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -96,9 +96,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B3: + config_R0.4_3: R: 0.4 
- beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -116,13 +116,13 @@ ang: 0.4: 3 ############################################################################ - # Different R & beta configurations for SoftDrop - config_R0.4_B1_SD: + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -140,12 +140,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B1.5_SD: + config_R0.4_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -163,12 +163,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B2_SD: + config_R0.4_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -185,12 +185,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B3_SD: + config_R0.4_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 100, 120] @@ -211,7 +211,7 @@ ang: ############################################################################### # Theory comparison parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 
2, 3] theory_dir: "/home/ezra/theory_predictions/" pt_scale_factors_filename: "qg_fractions-ALICE-R04.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin2.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin2.yaml index 654936708..133036e94 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin2.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin2.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.4] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [40, 60] plot_overlay_list: - - ['config_R0.4_B1', 'config_R0.4_B1.5', 'config_R0.4_B2', 'config_R0.4_B3'] - - ['config_R0.4_B1_SD', 'config_R0.4_B1.5_SD', 'config_R0.4_B2_SD', 'config_R0.4_B3_SD'] + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.4_B1: + # Different R & alpha configurations + config_R0.4_1: R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -51,9 +51,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B1.5: + 
config_R0.4_1.5: R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -65,9 +65,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B2: + config_R0.4_2: R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -78,9 +78,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B3: + config_R0.4_3: R: 0.4 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -93,13 +93,13 @@ ang: ############################################################################ - # Different R & beta configurations for SoftDrop - config_R0.4_B1_SD: + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -112,12 +112,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B1.5_SD: + config_R0.4_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -131,12 +131,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B2_SD: + config_R0.4_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 10, 20, 40, 60, 
80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -148,12 +148,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B3_SD: + config_R0.4_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 3 + alpha: 3 pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] @@ -169,7 +169,7 @@ ang: ############################################################################### # Theory comparison parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 2, 3] theory_dir: "/home/ezra/theory_predictions/" pt_scale_factors_filename: "qg_fractions-ALICE-R04.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin3.yaml index 39c6b1fab..df3d0e0ae 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin3.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin3.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.4] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. 
Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [60, 80] plot_overlay_list: - - ['config_R0.4_B1', 'config_R0.4_B1.5', 'config_R0.4_B2', 'config_R0.4_B3'] - - ['config_R0.4_B1_SD', 'config_R0.4_B1.5_SD', 'config_R0.4_B2_SD', 'config_R0.4_B3_SD'] + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.4_B1: + # Different R & alpha configurations + config_R0.4_1: R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -50,9 +50,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B1.5: + config_R0.4_1.5: R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -63,9 +63,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B2: + config_R0.4_2: R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -76,9 +76,9 @@ ang: reg_param: 0.4: 3 - config_R0.4_B3: + config_R0.4_3: R: 0.4 - beta: 3 + alpha: 3 
pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -90,13 +90,13 @@ ang: 0.4: 3 ############################################################################ - # Different R & beta configurations for SoftDrop - config_R0.4_B1_SD: + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -107,12 +107,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B1.5_SD: + config_R0.4_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -123,12 +123,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B2_SD: + config_R0.4_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -139,12 +139,12 @@ ang: reg_param: 0.4: 3 - config_R0.4_B3_SD: + config_R0.4_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -159,7 +159,7 @@ ang: ############################################################################### # Theory comparison parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 2, 3] theory_dir: "/home/ezra/theory_predictions/" 
pt_scale_factors_filename: "qg_fractions-ALICE-R04.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] diff --git a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin4.yaml index 5e6ca78b4..ea69adde2 100644 --- a/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin4.yaml +++ b/pyjetty/alice_analysis/config/ang/angularity_R0.4_ptbin4.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.4] -betas: [1, 1.5, 2, 3] +alphas: [1, 1.5, 2, 3] # Initial detector-level binnings. Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -27,19 +27,19 @@ sd_beta: 0 # exponent on (deltaR / R) ang: common_settings: - xtitle: '#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}' - ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#beta}}^{#it{#kappa}=1}}' + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' pt_bins_reported: [80, 100] plot_overlay_list: - - ['config_R0.4_B1', 'config_R0.4_B1.5', 'config_R0.4_B2', 'config_R0.4_B3'] - - ['config_R0.4_B1_SD', 'config_R0.4_B1.5_SD', 'config_R0.4_B2_SD', 'config_R0.4_B3_SD'] + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] max_reg_param: 10 ############################################################################ - # Different R & beta configurations - config_R0.4_B1: + # Different R & alpha configurations + config_R0.4_1: R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -47,12 +47,12 @@ ang: obs_max_reported: [0.55] obs_bins_det: [0, 0.06, 0.09, 0.12, 0.2, 0.3, 0.4, 0.55, 0.8] obs_bins_det_sys_binning: [0, 
0.05, 0.1, 0.13, 0.21, 0.3, 0.41, 0.6, 0.75] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 - config_R0.4_B1.5: + config_R0.4_1.5: R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -60,12 +60,12 @@ ang: obs_max_reported: [0.45] obs_bins_det: [0, 0.03, 0.06, 0.1, 0.2, 0.45, 0.7] obs_bins_det_sys_binning: [0, 0.035, 0.065, 0.09, 0.21, 0.48, 0.6] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 - config_R0.4_B2: + config_R0.4_2: R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -73,12 +73,12 @@ ang: obs_max_reported: [0.4] obs_bins_det: [0, 0.02, 0.05, 0.12, 0.4, 0.65] obs_bins_det_sys_binning: [0, 0.02, 0.055, 0.085, 0.13, 0.37, 0.45] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 - config_R0.4_B3: + config_R0.4_3: R: 0.4 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -86,17 +86,17 @@ ang: obs_max_reported: [0.3] obs_bins_det: [0, 0.01, 0.02, 0.04, 0.08, 0.3, 0.5] obs_bins_det_sys_binning: [0, 0.005, 0.025, 0.045, 0.1, 0.33, 0.35] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 ############################################################################ - # Different R & beta configurations for SoftDrop - config_R0.4_B1_SD: + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1 + alpha: 1 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] 
@@ -104,15 +104,15 @@ ang: obs_max_reported: [0.55] obs_bins_det: [0, 0.02, 0.05, 0.12, 0.25, 0.55, 0.8] obs_bins_det_sys_binning: [0, 0.015, 0.045, 0.13, 0.27, 0.58, 0.8] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 - config_R0.4_B1.5_SD: + config_R0.4_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 1.5 + alpha: 1.5 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -120,15 +120,15 @@ ang: obs_max_reported: [0.45] obs_bins_det: [0, 0.005, 0.015, 0.05, 0.17, 0.45, 0.7] obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.045, 0.06, 0.155, 0.47, 0.7] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 - config_R0.4_B2_SD: + config_R0.4_2_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 2 + alpha: 2 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -136,15 +136,15 @@ ang: obs_max_reported: [0.35] obs_bins_det: [0, 0.002, 0.008, 0.05, 0.14, 0.35, 0.6] obs_bins_det_sys_binning: [0, 0.001, 0.007, 0.06, 0.15, 0.33, 0.6] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 - config_R0.4_B3_SD: + config_R0.4_3_SD: SoftDrop: zcut: 0.2 beta: 0 R: 0.4 - beta: 3 + alpha: 3 pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] @@ -152,14 +152,14 @@ ang: obs_max_reported: [0.2] obs_bins_det: [0, 0.002, 0.007, 0.03, 0.07, 0.2, 0.5] obs_bins_det_sys_binning: [0, 0.001, 0.008, 0.035, 0.07, 0.11, 0.24, 0.5] - reg_param: - 0.4: 3 + #reg_param: + # 0.4: 3 ############################################################################### # Theory comparison parameters do_theory_comp: True -theory_beta: [1.5, 2, 3] +theory_alpha: [1.5, 2, 3] theory_dir: 
"/home/ezra/theory_predictions/" pt_scale_factors_filename: "qg_fractions-ALICE-R04.txt" response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] @@ -179,8 +179,8 @@ output_dir: "/rstorage/alice/AnalysisResults/ang/AngR04_ptbin4" roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" analysis_observable: 'ang' -do_unfolding: False -force_rebin: False +do_unfolding: True +force_rebin: True do_systematics: True do_plot_final_result: True do_plot_performance: False diff --git a/pyjetty/alice_analysis/config/ang/gen_angularity.yaml b/pyjetty/alice_analysis/config/ang/gen_angularity.yaml index 884bb7431..bb723197b 100644 --- a/pyjetty/alice_analysis/config/ang/gen_angularity.yaml +++ b/pyjetty/alice_analysis/config/ang/gen_angularity.yaml @@ -1,6 +1,6 @@ # Processing parameters jetR: [0.2, 0.4] -betas: [1.5, 2, 3] +alphas: [1.5, 2, 3] # Initial detector-level binnings. Distributions are rebinned via configs before unfolding n_pt_bins: 195 @@ -14,7 +14,7 @@ rap_limits: [-1, 1] debug_level: 0 process_observables: ['ang'] -jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +jet_matching_distance: 0.5 # Match jets with deltaR < jet_matching_distance*jetR reject_tracks_fraction: 0 # SoftDrop setting (current same for all SD plots) @@ -31,34 +31,34 @@ theory_pt_bins: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 90, 100, ang: ############################################################################ - # Different R & beta configurations + # Different R & alpha configurations - config_B1.5: - beta: 1.5 + config_1.5: + alpha: 1.5 - config_B2: - beta: 2 + config_2: + alpha: 2 - config_B3: - beta: 3 + config_3: + alpha: 3 ############################################################################ - # Different R & beta configurations for SoftDrop + # Different R & alpha configurations for SoftDrop - config_B1.5_SD: + config_1.5_SD: SoftDrop: zcut: 0.2 beta: 0 - beta: 1.5 + alpha: 1.5 - 
config_B2_SD: + config_2_SD: SoftDrop: zcut: 0.2 beta: 0 - beta: 2 + alpha: 2 - config_B3_SD: + config_3_SD: SoftDrop: zcut: 0.2 beta: 0 - beta: 3 \ No newline at end of file + alpha: 3 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_D0_baseline.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_D0_baseline.yaml new file mode 100644 index 000000000..d9b1c68af --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_D0_baseline.yaml @@ -0,0 +1,204 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Max eta value (for plots) +eta_max: 0.9 + +sd_zcut: 0.2 +sd_beta: 0 + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [10, 20, 40] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + # - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.08, 0.18, 0.28, 0.38, 0.52, 0.8] + obs_max_reported: [0.52, 0.52] + obs_bins_det: [0, 0.02, 0.05, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.24, + 0.26, 0.28, 
0.3, 0.34, 0.38, 0.42, 0.46, 0.5, 0.6, 0.8] + obs_bins_det_sys_binning: [0.0, 0.025, 0.05, 0.075, 0.1, 0.12, 0.15, 0.18, 0.2, 0.22, + 0.25, 0.28, 0.3, 0.32, 0.35, 0.4, 0.45, 0.55, 0.8] + reg_param: + 0.4: 5 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.04, 0.09, 0.15, 0.24, 0.35, 0.45, 0.7] + obs_max_reported: [0.35, 0.35] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.12, + 0.14, 0.16, 0.18, 0.20, 0.22, 0.24, 0.28, 0.32, 0.38, 0.45, 0.5, 0.75] + obs_bins_det_sys_binning: [0, 0.015, 0.03, 0.045, 0.06, 0.075, 0.09, 0.11, 0.13, 0.15, + 0.17, 0.19, 0.21, 0.23, 0.26, 0.29, 0.33, 0.37, 0.48, 0.75] + reg_param: + 0.4: 5 + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.025, 0.05, 0.09, 0.13, 0.18, 0.3, 0.35, 0.5, 0.7] + obs_max_reported: [0.3, 0.3] + obs_bins_det: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, + 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.2, 0.22, 0.25, 0.3, 0.35, 0.4, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.015, 0.025, 0.035, 0.045, 0.055, 0.065, 0.075, 0.085, 0.1, + 0.11, 0.13, 0.16, 0.18, 0.22, 0.25, 0.32, 0.37, 0.42, 0.55, 0.7] + reg_param: + 0.4: 5 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 10, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100] + obs_bins_truth: [0.0, 0.01, 0.03, 0.05, 0.075, 0.12, 0.19, 0.25, 0.35, 0.5] + obs_max_reported: [0.19, 0.19] + obs_bins_det: [0, 0.002, 0.005, 0.008, 0.01, 0.015, 0.02, 0.025, 
0.03, 0.035, 0.04, 0.045, + 0.05, 0.055, 0.06, 0.065, 0.07, 0.075, 0.08, 0.09, 0.1, 0.12, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5] + obs_bins_det_sys_binning: [0, 0.003, 0.006, 0.009, 0.015, 0.025, 0.035, 0.045, 0.055, + 0.065, 0.075, 0.085, 0.095, 0.12, 0.16, 0.2, 0.26, 0.32, 0.42, 0.5] + reg_param: + 0.4: 5 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + #config_R0.4_1_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 1 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.4, 0.6, 0.8] + # obs_max_reported: [0.6] + # obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.17, 0.25, 0.4, 0.6, 0.8] + # obs_bins_det_sys_binning: [0, 0.015, 0.03, 0.05, 0.1, 0.15, 0.25, 0.41, 0.58, 0.8] + # #reg_param: + # # 0.4: 3 + + #config_R0.4_1.5_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 1.5 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + # obs_max_reported: [0.5] + # obs_bins_det: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + # obs_bins_det_sys_binning: [0, 0.005, 0.01, 0.02, 0.03, 0.045, 0.11, 0.16, 0.27, 0.53, 0.7] + # #reg_param: + # # 0.4: 3 + + #config_R0.4_2_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 2 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.085, 
0.15, 0.35, 0.6] + # obs_max_reported: [0.35] + # obs_bins_det: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.07, 0.1, 0.15, 0.2, 0.35, 0.6] + # obs_bins_det_sys_binning: [0, 0.003, 0.009, 0.015, 0.03, 0.05, 0.075, 0.1, 0.16, 0.21, 0.33, 0.6] + # #reg_param: + # # 0.4: 3 + + #config_R0.4_3_SD: + # SoftDrop: + # zcut: 0.2 + # beta: 0 + # R: 0.4 + # alpha: 3 + # pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + # pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + # pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + # obs_bins_truth: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + # obs_max_reported: [0.25] + # obs_bins_det: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + # obs_bins_det_sys_binning: [0, 0.003, 0.015, 0.035, 0.07, 0.11, 0.24, 0.5] + # #reg_param: + # # 0.4: 3 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/pp/pTcut/AngR04_D0_baseline" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 +# - fastsim_generator2 + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1163212/AnalysisResultsFinal.root' +main_response: 
'/rstorage/alice/AnalysisResults/ang/1191459/Scaled_no_cuts/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1191462/Scaled_no_cuts/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1191200/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1191466/Scaled_no_cuts/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1191463/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1191464/Scaled_no_cuts/AnalysisResultsFinal.root'] + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_ptbin3.yaml new file mode 100644 index 000000000..27645ea08 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_ptbin3.yaml @@ -0,0 +1,196 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Max eta value (for plots) +eta_max: 0.9 + +sd_zcut: 0.2 +sd_beta: 0 + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [60, 80] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + + 
############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.07, 0.12, 0.16, 0.2, 0.25, 0.35, 0.55, 0.75] + obs_bins_det_sys_binning: [0, 0.05, 0.1, 0.13, 0.17, 0.21, 0.3, 0.41, 0.6, 0.75] + #reg_param: + # 0.4: 3 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.03, 0.07, 0.12, 0.16, 0.2, 0.25, 0.45, 0.6] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.03, 0.05, 0.07, 0.12, 0.16, 0.20, 0.25, 0.45, 0.6] + obs_bins_det_sys_binning: [0, 0.03, 0.045, 0.075, 0.09, 0.13, 0.17, 0.21, 0.26, 0.48, 0.6] + #reg_param: + # 0.4: 3 + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.025, 0.05, 0.075, 0.1, 0.15, 0.20, 0.35, 0.5] + obs_bins_det_sys_binning: [0, 0.02, 0.055, 0.085, 0.13, 0.16, 0.22, 0.37, 0.45] + #reg_param: + # 0.4: 3 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + 
obs_max_reported: [0.25] + obs_bins_det: [0, 0.008, 0.015, 0.025, 0.04, 0.06, 0.11, 0.25, 0.4] + obs_bins_det_sys_binning: [0, 0.01, 0.015, 0.02, 0.025, 0.045, 0.065, 0.12, 0.23, 0.35] + #reg_param: + # 0.4: 3 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.03, 0.05, 0.07, 0.1, 0.15, 0.25, 0.4, 0.6, 0.8] + obs_max_reported: [0.6] + obs_bins_det: [0, 0.02, 0.04, 0.06, 0.09, 0.17, 0.25, 0.4, 0.6, 0.8] + obs_bins_det_sys_binning: [0, 0.015, 0.03, 0.05, 0.1, 0.15, 0.25, 0.41, 0.58, 0.8] + #reg_param: + # 0.4: 3 + + config_R0.4_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + obs_max_reported: [0.5] + obs_bins_det: [0, 0.007, 0.015, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5, 0.7] + obs_bins_det_sys_binning: [0, 0.005, 0.01, 0.02, 0.03, 0.045, 0.11, 0.16, 0.27, 0.53, 0.7] + #reg_param: + # 0.4: 3 + + config_R0.4_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.085, 0.15, 0.35, 0.6] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.002, 0.008, 0.015, 0.025, 0.045, 0.07, 0.1, 0.15, 0.2, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.003, 0.009, 
0.015, 0.03, 0.05, 0.075, 0.1, 0.16, 0.21, 0.33, 0.6] + #reg_param: + # 0.4: 3 + + config_R0.4_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + obs_max_reported: [0.25] + obs_bins_det: [0, 0.001, 0.003, 0.008, 0.015, 0.03, 0.06, 0.1, 0.25, 0.5] + obs_bins_det_sys_binning: [0, 0.003, 0.015, 0.035, 0.07, 0.11, 0.24, 0.5] + #reg_param: + # 0.4: 3 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/pp/pTcut/AngR04_ptbin3" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: True +force_rebin: True +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 +# - fastsim_generator2 + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1079543/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1080979/Scaled_no_cuts/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1081242/Scaled_no_cuts/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1081276/AnalysisResultsFinal.root' 
+randmass_response: '/rstorage/alice/AnalysisResults/ang/1081277/Scaled_no_cuts/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1081307/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1081308/Scaled_no_cuts/AnalysisResultsFinal.root'] + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_ptbin4.yaml new file mode 100644 index 000000000..ba1e632a5 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/angularity_R0.4_ptbin4.yaml @@ -0,0 +1,197 @@ +# Processing parameters +jetR: [0.4] +alphas: [1, 1.5, 2, 3] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Max eta value (for plots) +eta_max: 0.9 + +sd_zcut: 0.2 +sd_beta: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + pt_bins_reported: [80, 100] + plot_overlay_list: + - ['config_R0.4_1', 'config_R0.4_1.5', 'config_R0.4_2', 'config_R0.4_3'] + - ['config_R0.4_1_SD', 'config_R0.4_1.5_SD', 'config_R0.4_2_SD', 'config_R0.4_3_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different R & alpha configurations + config_R0.4_1: + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 
100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.05, 0.1, 0.2, 0.3, 0.55, 0.8] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.06, 0.09, 0.12, 0.2, 0.3, 0.4, 0.55, 0.8] + obs_bins_det_sys_binning: [0, 0.05, 0.1, 0.13, 0.21, 0.3, 0.41, 0.6, 0.75] + #reg_param: + # 0.4: 2 + + config_R0.4_1.5: + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.03, 0.06, 0.1, 0.2, 0.45, 0.7] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.03, 0.06, 0.1, 0.2, 0.45, 0.7] + obs_bins_det_sys_binning: [0, 0.035, 0.065, 0.09, 0.21, 0.48, 0.6] + #reg_param: + # 0.4: 2 + + config_R0.4_2: + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.02, 0.05, 0.12, 0.4, 0.65] + obs_max_reported: [0.4] + obs_bins_det: [0, 0.02, 0.05, 0.12, 0.4, 0.65] + obs_bins_det_sys_binning: [0, 0.02, 0.055, 0.085, 0.13, 0.37, 0.45] + #reg_param: + # 0.4: 2 + + config_R0.4_3: + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.01, 0.02, 0.04, 0.08, 0.3, 0.5] + obs_max_reported: [0.3] + obs_bins_det: [0, 0.01, 0.02, 0.04, 0.08, 0.3, 0.5] + obs_bins_det_sys_binning: [0, 0.005, 0.025, 0.045, 0.1, 0.33, 0.35] + #reg_param: + # 0.4: 2 + + ############################################################################ + # Different R & alpha configurations for SoftDrop + config_R0.4_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1 + pt_bins_truth: [5, 20, 40, 60, 
80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.02, 0.05, 0.12, 0.25, 0.55, 0.8] + obs_max_reported: [0.55] + obs_bins_det: [0, 0.02, 0.05, 0.12, 0.25, 0.55, 0.8] + obs_bins_det_sys_binning: [0, 0.015, 0.045, 0.13, 0.27, 0.58, 0.8] + #reg_param: + # 0.4: 2 + + config_R0.4_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 1.5 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.005, 0.015, 0.05, 0.17, 0.45, 0.7] + obs_max_reported: [0.45] + obs_bins_det: [0, 0.005, 0.015, 0.05, 0.17, 0.45, 0.7] + obs_bins_det_sys_binning: [0, 0.005, 0.015, 0.045, 0.06, 0.155, 0.47, 0.7] + #reg_param: + # 0.4: 2 + + config_R0.4_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.002, 0.008, 0.05, 0.14, 0.35, 0.6] + obs_max_reported: [0.35] + obs_bins_det: [0, 0.002, 0.008, 0.05, 0.14, 0.35, 0.6] + obs_bins_det_sys_binning: [0, 0.001, 0.007, 0.06, 0.15, 0.33, 0.6] + #reg_param: + # 0.4: 2 + + config_R0.4_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + R: 0.4 + alpha: 3 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 40, 50, 60, 80, 100, 120] + obs_bins_truth: [0, 0.002, 0.007, 0.03, 0.07, 0.2, 0.5] + obs_max_reported: [0.2] + obs_bins_det: [0, 0.002, 0.007, 0.03, 0.07, 0.2, 0.5] + obs_bins_det_sys_binning: [0, 0.001, 0.008, 0.035, 0.07, 0.11, 0.24, 0.5] + #reg_param: + # 0.4: 2 + + 
+############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/pp/pTcut/AngR04_ptbin4" +roounfold_path: "/home/ezra/heppy/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'ang' +do_unfolding: True +force_rebin: True +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 +# - fastsim_generator2 + +main_data: '/rstorage/alice/AnalysisResults/ang/1079543/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1080979/Scaled_no_cuts/AnalysisResultsFinal.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1081242/Scaled_no_cuts/AnalysisResultsFinal.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1081276/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1081277/Scaled_no_cuts/AnalysisResultsFinal.root' +# fastsim order: PYTHIA, Herwig +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1081307/Scaled_no_cuts/AnalysisResultsFinal.root', + '/rstorage/alice/AnalysisResults/ang/1081308/Scaled_no_cuts/AnalysisResultsFinal.root'] + +# Prior variation parameters +prior_variation_option: 1 +prior1_variation_parameter: 0.5 +prior2_variation_parameter: -0.5 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_fastsim_pTcut.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_fastsim_pTcut.yaml new file mode 100644 index 
000000000..992db20ee --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_fastsim_pTcut.yaml @@ -0,0 +1,90 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Require a minimum leading track pT (in GeV/c) +# Comparison to Pb-Pb: 5 GeV +# Comparison to D0 analysis (scaled by m_T): +# 2 GeV --> 2.734 GeV +# 3 GeV --> 3.532 GeV +# 5 GeV --> 5.336 GeV +# 7 GeV --> 7.244 GeV +min_leading_track_pT: 5.336 + +fast_simulation: True +reject_tracks_fraction: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + 
############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_gen_pTcut.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_gen_pTcut.yaml new file mode 100644 index 000000000..54e3eccf1 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_gen_pTcut.yaml @@ -0,0 +1,86 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +reject_tracks_fraction: 0 + +# Uncomment for MC // comment for data +#reject_tracks_fraction: 0.02 + +# Require a minimum leading track pT (in GeV/c) +min_leading_track_pT: 5.336 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + 
+############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_pTcut.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_pTcut.yaml new file mode 100644 index 000000000..0812ba318 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_pTcut.yaml @@ -0,0 +1,90 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Require a minimum leading track pT (in GeV/c) +# Comparison to Pb-Pb: 5 GeV +# Comparison to D0 analysis (scaled by m_T): +# 2 GeV --> 2.734 GeV +# 3 GeV --> 3.532 GeV +# 5 GeV --> 5.336 GeV +# 7 GeV --> 7.244 GeV +min_leading_track_pT: 5.336 + +# Uncomment for data // comment for MC +reject_tracks_fraction: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + 
+ config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_randmass_pTcut.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_randmass_pTcut.yaml new file mode 100644 index 000000000..ea369ebe7 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_randmass_pTcut.yaml @@ -0,0 +1,92 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Require a minimum leading track pT (in GeV/c) +# Comparison to Pb-Pb: 5 GeV +# Comparison to D0 analysis (scaled by m_T): +# 2 GeV --> 2.734 GeV +# 3 GeV --> 3.532 GeV +# 5 GeV --> 5.336 GeV +# 7 GeV --> 7.244 GeV +min_leading_track_pT: 5.336 + +reject_tracks_fraction: 0 + +# Mass assumption for track/jet reconstruction +track_mass: 0.13957 # Pion mass in GeV/c^2 +track_random_mass: True 
# Whether to randomly assign K and p mass to some tracks + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_treff_pTcut.yaml b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_treff_pTcut.yaml new file mode 100644 index 000000000..93727cca8 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_treff_pTcut.yaml @@ -0,0 +1,89 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra 
verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Require a minimum leading track pT (in GeV/c) +# Comparison to Pb-Pb: 5 GeV +# Comparison to D0 analysis (scaled by m_T): +# 2 GeV --> 2.734 GeV +# 3 GeV --> 3.532 GeV +# 5 GeV --> 5.336 GeV +# 7 GeV --> 7.244 GeV +min_leading_track_pT: 5.336 + +reject_tracks_fraction: 0.03 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git 
a/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin2.yaml b/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin2.yaml new file mode 100644 index 000000000..689783ff0 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin2.yaml @@ -0,0 +1,110 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Max eta value (for plots) +eta_max: 0.9 + +sd_zcut: 0.2 +sd_beta: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [40, 60] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + obs_max_reported: [7] + obs_bins_det: [0, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.4, 5, 5.6, 6.2, 7, 10] + obs_bins_det_sys_binning: [0, 0.5, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.2, 4.6, 5.2, + 5.8, 6.8, 8, 9, 12] + reg_param: + 0.2: 2 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + 
pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + obs_max_reported: [7] + obs_bins_det: [0, 0.3, 0.6, 0.9, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.6, 4, 4.6, 5.2, 6, 7, 9] + obs_bins_det_sys_binning: [0, 0.4, 0.7, 1, 1.4, 1.8, 2.2, 2.6, 3, 3.4, 3.8, 4.4, 5, 6, 7, 10] + SoftDrop: + zcut: 0.2 + beta: 0 + reg_param: + 0.2: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin2" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1110550/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1110651/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1110652/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1110653/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1110654/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig +fastsim_response: 
['/rstorage/alice/AnalysisResults/ang/1110655/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1110656/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root'] + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 diff --git a/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin3.yaml b/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin3.yaml new file mode 100644 index 000000000..b81acfeec --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin3.yaml @@ -0,0 +1,109 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Max eta value (for plots) +eta_max: 0.9 + +sd_zcut: 0.2 +sd_beta: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [60, 80] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_max_reported: [10] + obs_bins_det: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_bins_det_sys_binning: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + reg_param: + 0.2: 2 + + ############################################################################ + # 
Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_max_reported: [10] + obs_bins_det: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12] + obs_bins_det_sys_binning: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + SoftDrop: + zcut: 0.2 + beta: 0 + reg_param: + 0.2: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin3" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1110550/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1110651/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1110652/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: '/rstorage/alice/AnalysisResults/ang/1110653/AnalysisResultsFinal.root' +randmass_response: 
'/rstorage/alice/AnalysisResults/ang/1110654/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1110655/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1110656/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root'] + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 diff --git a/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin4.yaml b/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin4.yaml new file mode 100644 index 000000000..c8542d45b --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin4.yaml @@ -0,0 +1,109 @@ +# Processing parameters +jetR: [0.2] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Max eta value (for plots) +eta_max: 0.9 + +sd_zcut: 0.2 +sd_beta: 0 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for jet mass observables +mass: + + common_settings: + xtitle: '#it{m}_{jet}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + pt_bins_reported: [80, 100] + plot_overlay_list: + - ['config_m'] + - ['config_m_SD'] + max_reg_param: 10 + reg_param_variation: 1 + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 2, 4, 6, 8, 12, 16] + obs_max_reported: [12] + obs_bins_det: [0, 2, 4, 6, 8, 12, 16] + obs_bins_det_sys_binning: [0, 1.8, 3.6, 4.4, 5.2, 7, 9, 11, 13, 
16, 20] + reg_param: + 0.2: 2 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + R: 0.2 + pt_bins_truth: [5, 20, 40, 60, 80, 100, 150, 200] + pt_bins_det: [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120, 150, 200] + pt_bins_det_sys_truncation: [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 100, 120] + obs_bins_truth: [0, 1.8, 3.6, 5.8, 8, 12, 16] + obs_max_reported: [12] + obs_bins_det: [0, 1.8, 3.6, 5.8, 8, 12, 16] + obs_bins_det_sys_binning: [0, 1.6, 3.2, 5.6, 7.6, 9, 12, 16, 20] + SoftDrop: + zcut: 0.2 + beta: 0 + reg_param: + 0.2: 2 + + +############################################################################### +# Analysis & plotting parameters +file_format: ".pdf" +output_dir: "/rstorage/alice/AnalysisResults/ang/pp/AngR02_ptbin4" +roounfold_path: "$HEPPY_DIR/external/roounfold/roounfold-current/lib/libRooUnfold.so" + +analysis_observable: 'mass' +do_unfolding: False +force_rebin: False +do_systematics: True +do_plot_final_result: True +do_plot_performance: False +figure_approval_status: 'Work In Progress' + +# Whether or not to use the previous measurement in ratio +use_prev_result: False + +# List of which systematics to perform +# Options: [main, trkeff, prior1, prior2, truncation, binning, \ +# random_mass, fastsim_generator0, fastsim_generator1] +systematics_list: + - main + - trkeff + - prior1 + - prior2 + - truncation + - binning + - random_mass + - fastsim_generator0 + - fastsim_generator1 + +# Paths to processing output, to be used for unfolding +main_data: '/rstorage/alice/AnalysisResults/ang/1110550/AnalysisResultsFinal.root' +main_response: '/rstorage/alice/AnalysisResults/ang/1110651/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +trkeff_response: '/rstorage/alice/AnalysisResults/ang/1110652/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +randmass_data: 
'/rstorage/alice/AnalysisResults/ang/1110653/AnalysisResultsFinal.root' +randmass_response: '/rstorage/alice/AnalysisResults/ang/1110654/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root' +# fastsim order: PYTHIA, Herwig +fastsim_response: ['/rstorage/alice/AnalysisResults/ang/1110655/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root', + '/rstorage/alice/AnalysisResults/ang/1110656/Scaled_no_cuts/AnalysisResultsFinal_no1-4.root'] + +# Prior variation parameters +prior_variation_option: 2 +prior1_variation_parameter: 0.1 +prior2_variation_parameter: -0.1 diff --git a/pyjetty/alice_analysis/config/ang/process_angularity.yaml b/pyjetty/alice_analysis/config/ang/process_angularity.yaml index 73f47321b..73a3453ab 100644 --- a/pyjetty/alice_analysis/config/ang/process_angularity.yaml +++ b/pyjetty/alice_analysis/config/ang/process_angularity.yaml @@ -1,26 +1,83 @@ # Processing parameters -jetR: [0.2, 0.4, 0.6] -betas: [1, 1.5, 2, 3] - -# Initial detector-level binnings. Distributions are rebinned via configs before unfolding -n_pt_bins: 195 -pt_limits: [5, 200] -n_lambda_bins: 160 -lambda_limits: [0, 0.8] -n_rap_bins: 50 # just for fun, look at lambda distribution vs rapidity -rap_limits: [-1, 1] - -# Mass assumption for track/jet reconstruction -track_mass: 0.13957 # Pion mass in GeV/c^2 -track_random_mass: False # Whether to randomly assign K and p mass to some tracks +jetR: [0.2, 0.4] # For extra verbose output in some functions debug_level: 0 -process_observables: ['ang'] +process_observables: ['ang', 'mass'] jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC reject_tracks_fraction: 0 -# SoftDrop setting (current same for all SD plots) -sd_zcut: 0.2 # multiplier -sd_beta: 0 # exponent on (deltaR / R) +# Uncomment for MC // comment for data +#reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding 
parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/process_angularity_fastsim.yaml b/pyjetty/alice_analysis/config/ang/process_angularity_fastsim.yaml new file mode 100644 index 000000000..a06994725 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/process_angularity_fastsim.yaml @@ -0,0 +1,89 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +# Mass assumption for track/jet reconstruction 
+track_mass: 0.13957 # Pion mass in GeV/c^2 +track_random_mass: False # Whether to randomly assign K and p mass to some tracks + +# For extra verbose output in some functions +debug_level: 0 + +reject_tracks_fraction: 0 +fast_simulation: True +dry_run: False + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/process_angularity_randmass.yaml b/pyjetty/alice_analysis/config/ang/process_angularity_randmass.yaml new file mode 100644 index 000000000..6ffb40649 --- /dev/null +++ 
b/pyjetty/alice_analysis/config/ang/process_angularity_randmass.yaml @@ -0,0 +1,83 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +reject_tracks_fraction: 0 + +# Mass assumption for track/jet reconstruction +track_mass: 0.13957 # Pion mass in GeV/c^2 +track_random_mass: True # Whether to randomly assign K and p mass to some tracks + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + ############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: 
"groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/process_angularity_treff.yaml b/pyjetty/alice_analysis/config/ang/process_angularity_treff.yaml new file mode 100644 index 000000000..42fa53576 --- /dev/null +++ b/pyjetty/alice_analysis/config/ang/process_angularity_treff.yaml @@ -0,0 +1,80 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['ang', 'mass'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR + +reject_tracks_fraction: 0.03 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +ang: + + common_settings: + xtitle: '#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{#lambda}_{#it{#alpha}}^{#it{#kappa}=1}}' + + ############################################################################ + # Different alpha configurations + config_1: + alpha: 1 + + config_1.5: + alpha: 1.5 + + config_2: + alpha: 2 + + config_3: + alpha: 3 + + ############################################################################ + # Different alpha configurations with SoftDrop + config_1_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1 + + config_1.5_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 1.5 + + config_2_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 2 + + config_3_SD: + SoftDrop: + zcut: 0.2 + beta: 0 + alpha: 3 + + +############################################################################### +mass: + + common_settings: + xtitle: '#it{m}_{jet} (GeV/#it{c}^{2})' + ytitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{m}_{jet}}' + + ############################################################################ + # Different alpha configurations + config_m: + name: "mass" + + 
############################################################################ + # Different alpha configurations with SoftDrop + config_m_SD: + name: "groomed mass" + SoftDrop: + zcut: 0.2 + beta: 0 diff --git a/pyjetty/alice_analysis/config/ang/randmass_angularity.yaml b/pyjetty/alice_analysis/config/ang/randmass_angularity.yaml deleted file mode 100644 index 59c2d776f..000000000 --- a/pyjetty/alice_analysis/config/ang/randmass_angularity.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Processing parameters -jetR: [0.2, 0.4, 0.6] -betas: [1, 1.5, 2, 3] - -# Initial detector-level binnings. Distributions are rebinned via configs before unfolding -n_pt_bins: 195 -pt_limits: [5, 200] -n_lambda_bins: 160 -lambda_limits: [0, 0.8] -n_rap_bins: 50 # just for fun, look at lambda distribution vs rapidity -rap_limits: [-1, 1] - -# Mass assumption for track/jet reconstruction -track_mass: 0.13957 # Pion mass in GeV/c^2 -track_random_mass: True # Whether to randomly assign K and p mass to some tracks - -# For extra verbose output in some functions -debug_level: 0 - -process_observables: ['ang'] -jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR -reject_tracks_fraction: 0 - -# SoftDrop setting (current same for all SD plots) -sd_zcut: 0.2 # multiplier -sd_beta: 0 # exponent on (deltaR / R) diff --git a/pyjetty/alice_analysis/config/ang/treff_angularity.yaml b/pyjetty/alice_analysis/config/ang/treff_angularity.yaml deleted file mode 100644 index 203ed3f60..000000000 --- a/pyjetty/alice_analysis/config/ang/treff_angularity.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Processing parameters -jetR: [0.2, 0.4, 0.6] -betas: [1, 1.5, 2, 3] - -# Initial detector-level binnings. 
Distributions are rebinned via configs before unfolding -n_pt_bins: 195 -pt_limits: [5, 200] -n_lambda_bins: 160 -lambda_limits: [0, 0.8] -n_rap_bins: 50 # just for fun, look at lambda distribution vs rapidity -rap_limits: [-1, 1] - -# Mass assumption for track/jet reconstruction -track_mass: 0.13957 # Pion mass in GeV/c^2 -track_random_mass: False # Whether to randomly assign K and p mass to some tracks - -# For extra verbose output in some functions -debug_level: 0 - -process_observables: ['ang'] -jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR -reject_tracks_fraction: 0.04 - -# SoftDrop setting (current same for all SD plots) -sd_zcut: 0.2 # multiplier -sd_beta: 0 # exponent on (deltaR / R) diff --git a/pyjetty/alice_analysis/config/lund/process_lund.yaml b/pyjetty/alice_analysis/config/lund/process_lund.yaml new file mode 100644 index 000000000..89e3246d1 --- /dev/null +++ b/pyjetty/alice_analysis/config/lund/process_lund.yaml @@ -0,0 +1,41 @@ +# Processing parameters +jetR: [0.2, 0.4] + +# For extra verbose output in some functions +debug_level: 0 + +process_observables: ['lund'] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +# Uncomment for data // comment for MC +reject_tracks_fraction: 0 + +# Uncomment for MC // comment for data +#reject_tracks_fraction: 0.02 + + +############################################################################### +# Rebinning and unfolding parameters/configurations for angularity observables +lund: + + common_settings: + xtitle: '#it{z}_{g}' + ytitle: '#it{#theta}_{g}' + ztitle: '#frac{1}{#it{#sigma}_{jet}} #frac{d#it{#sigma}}{d#it{z}_{g} d#it{#theta}_{g}}' + + config1: + SoftDrop: + zcut: 0.2 + beta: 0 + + config2: + DynamicalGrooming: + a: 'late_kt_0.5' + + config3: + DynamicalGrooming: + a: 'late_kt_1.0' + + config4: + DynamicalGrooming: + a: 'late_kt_2.0' + diff --git a/pyjetty/alice_analysis/config/theta_g/pp/james_pp.yaml 
b/pyjetty/alice_analysis/config/theta_g/pp/james_pp.yaml index 155f5199b..aa7f6e20c 100644 --- a/pyjetty/alice_analysis/config/theta_g/pp/james_pp.yaml +++ b/pyjetty/alice_analysis/config/theta_g/pp/james_pp.yaml @@ -20,11 +20,6 @@ dry_run: False fast_simulation: False #reclustering_algorithm: 'AKT' -# Parameters for generating theory folding RMs -response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] -theory_pt_bins: [10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, - 85, 90, 95, 100, 105, 110, 115, 120, 125, 130, 135, 140, 145, - 150, 155, 160, 165, 170, 175, 180, 185, 190, 195, 200] # Observable block theta_g: @@ -188,6 +183,35 @@ zg: obs_bins_det: [0., 0.03, 0.06, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5] obs_bins_det_sys_binning: [0., 0.05, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.25, 0.28, 0.31, 0.34, 0.37, 0.4, 0.43, 0.46, 0.5] + +############################################################################### +# Theory comparison parameters +do_theory_comp: True +th_fold_observable: "theta_g" +#response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] +response_levels: [["p", "ch", "off"]] +theory_pt_bins: [10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, + 85, 90, 95, 100, 105, 110, 115, 120, 125, 130, 135, 140, 145, + 150, 155, 160, 165, 170, 175, 180, 185, 190, 195, 200] +final_pt_bins: [10, 20, 40, 60, 80, 100, 150, 200] +theory_obs_bins: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.10, + 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2 , 0.21, + 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3 , 0.31, 0.32, + 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4 , 0.41, 0.42, 0.43, + 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5 , 0.51, 0.52, 0.53, 0.54, + 0.55, 0.56, 0.57, 0.58, 0.59, 0.6 , 0.61, 0.62, 0.63, 0.64, 0.65, + 0.66, 0.67, 0.68, 0.69, 0.7 , 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, + 0.77, 0.78, 0.79, 0.8 , 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, + 0.88, 0.89, 0.9 , 0.91, 
0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, + 0.99, 1. ] +theory_dir: "/home/ezra/theory_predictions/theta_g/" +pt_scale_factors_path: "/home/ezra/theory_predictions/" +response_files: ["/rstorage/alice/AnalysisResults/theta_g/871287/AnalysisResultsFinal_no1-3.root", + "/rstorage/alice/AnalysisResults/theta_g/879817/AnalysisResultsFinal_no1-3.root"] +response_labels: ["PYTHIA8", "Herwig7"] +rebin_theory_response: True + + ############################################################################### # Analysis parameters diff --git a/pyjetty/alice_analysis/config/theta_g/pp/james_pp_gen.yaml b/pyjetty/alice_analysis/config/theta_g/pp/james_pp_gen.yaml new file mode 100644 index 000000000..e28233f46 --- /dev/null +++ b/pyjetty/alice_analysis/config/theta_g/pp/james_pp_gen.yaml @@ -0,0 +1,66 @@ +# Configuration for: +# (a) Processing of ROOT trees into histograms +# (b) Analysis of histograms into final result +# +# The observables are structured into observable blocks (e.g. theta_g, subjet_z, etc.) +# each containing multiple individual subconfigurations (e.g. zcut, beta, subjetR, etc.), +# where each subconfiguration corresponds to a single unfolded result. +# One can also specify a list of jetR, which will be looped over. 
+# +# The process step is intended to loop over all observables simultaneously +# The analysis step is intended to run for a single observable block + +process_observables: ['theta_g'] +jetR: [0.4] +jet_matching_distance: 0.6 # Match jets with deltaR < jet_matching_distance*jetR +reject_tracks_fraction: 0.0 +eta_max: 0.9 +debug_level: 0 +dry_run: False +fast_simulation: False +#reclustering_algorithm: 'AKT' + + +# Observable block +theta_g: + + common_settings: + xtitle: '#it{#theta}_{g}' + ytitle: '#frac{1}{#it{#sigma}_{jet, inc}} #frac{d#it{#sigma}}{d#it{#theta}_{g}}' + + config1: + SoftDrop: + zcut: 0.1 + beta: 0 + + config2: + SoftDrop: + zcut: 0.1 + beta: 1 + + config3: + SoftDrop: + zcut: 0.1 + beta: 2 + + +############################################################################### +# Theory comparison parameters +do_theory_comp: True +th_fold_observable: "theta_g" +#response_levels: [["p", "ch", "off"], ["p", "h", "off"], ["h", "ch", "on"]] +response_levels: [["p", "h", "off"]] +theory_pt_bins: [10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, + 85, 90, 95, 100, 105, 110, 115, 120, 125, 130, 135, 140, 145, + 150, 155, 160, 165, 170, 175, 180, 185, 190, 195, 200] +final_pt_bins: [10, 20, 40, 60, 80, 100, 150, 200] +theory_obs_bins: [0. , 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1 , + 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2 , 0.21, + 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3 , 0.31, 0.32, + 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4 , 0.41, 0.42, 0.43, + 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5 , 0.51, 0.52, 0.53, 0.54, + 0.55, 0.56, 0.57, 0.58, 0.59, 0.6 , 0.61, 0.62, 0.63, 0.64, 0.65, + 0.66, 0.67, 0.68, 0.69, 0.7 , 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, + 0.77, 0.78, 0.79, 0.8 , 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, + 0.88, 0.89, 0.9 , 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, + 0.99, 1. 
] diff --git a/pyjetty/alice_analysis/process/base/TrackEfficiencyConfiguration.yaml b/pyjetty/alice_analysis/process/base/TrackEfficiencyConfiguration.yaml new file mode 100644 index 000000000..87bc9797f --- /dev/null +++ b/pyjetty/alice_analysis/process/base/TrackEfficiencyConfiguration.yaml @@ -0,0 +1,25 @@ +# pT-based tracking efficiency cuts +# Studies by Jaime Norman + +ptBinning: [0.0, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 999.0] + +# pp data, sqrt(s) = 5.02 TeV +LHC17p: + "0_100": [0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97] +LHC17q: + "0_100": [0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97] + +# Pb-Pb data, sqrt(s_NN) = 5.02 TeV +LHC18q: + "0_10": [0.964, 0.964 , 0.939 , 0.936 , 0.949 , 0.958 , 0.976 , 0.967 , 0.967 , 0.967 , 0.967 , 0.976 , 0.976 , 0.984 , 0.976 , 0.976] + "30_50": [0.966, 0.966 , 0.958 , 0.958 , 0.958 , 0.967 , 0.976 , 0.984 , 0.984 , 0.983 , 0.983 , 0.983 , 0.983 , 0.983 , 0.983, 0.983] +LHC18r: + "0_10": [0.963, 0.963 , 0.940 , 0.936 , 0.949 , 0.958 , 0.977 , 0.967 , 0.976 , 0.966 , 0.975 , 0.975 , 0.975 , 0.982 , 0.982, 0.982] + "30_50": [0.965, 0.965 , 0.958 , 0.956 , 0.959 , 0.967 , 0.986 , 0.978 , 0.986 , 0.978 , 0.986 , 0.986 , 0.986 , 0.986 , 0.986 , 0.986] + +# Averaged values for pp and Pb-Pb +LHC17pq: + "0_100": [0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97, 0.97] +LHC18qr: + "0_10": [0.9635, 0.9635, 0.9395, 0.936, 0.949, 0.958, 0.9765, 0.967, 0.9715, 0.9665, 0.971, 0.9755, 0.9755, 0.983, 0.979, 0.979] + "30_50": [0.9655, 0.9655, 0.958, 0.957, 0.9585, 0.967, 0.981, 0.981, 0.985, 0.9805, 0.9845, 0.9845, 0.9845, 0.9845, 0.9845, 0.9845] diff --git a/pyjetty/alice_analysis/process/base/common_utils.py b/pyjetty/alice_analysis/process/base/common_utils.py index b68d45005..bbe37d9a2 100755 --- a/pyjetty/alice_analysis/process/base/common_utils.py 
+++ b/pyjetty/alice_analysis/process/base/common_utils.py @@ -46,7 +46,9 @@ def obs_settings(self, observable, obs_config_dict, obs_subconfig_list): elif observable == 'jet_axis': return [obs_config_dict[name]['axis'] for name in obs_subconfig_list] elif observable == 'ang': - return [obs_config_dict[name]['beta'] for name in obs_subconfig_list] + return [obs_config_dict[name]['alpha'] for name in obs_subconfig_list] + elif observable == 'theta_g': + return [obs_config_dict[name]['SoftDrop'] for name in obs_subconfig_list if 'SoftDrop' in obs_config_dict[name]] # Else observable not implemented return [None for _ in obs_subconfig_list] diff --git a/pyjetty/alice_analysis/process/base/process_base.py b/pyjetty/alice_analysis/process/base/process_base.py index 34c7fda2b..5c0196cab 100755 --- a/pyjetty/alice_analysis/process/base/process_base.py +++ b/pyjetty/alice_analysis/process/base/process_base.py @@ -55,21 +55,21 @@ def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, # Initialize utils class self.utils = process_utils.ProcessUtils() - + #--------------------------------------------------------------- # Initialize config file into class members #--------------------------------------------------------------- def initialize_config(self): - + # Read config file with open(self.config_file, 'r') as stream: config = yaml.safe_load(stream) - + if 'event_number_max' in config: self.event_number_max = config['event_number_max'] else: self.event_number_max = sys.maxsize - + self.jetR_list = config['jetR'] self.debug_level = config['debug_level'] @@ -79,7 +79,7 @@ def initialize_config(self): print('Constituent subtractor is enabled.') self.do_constituent_subtraction = True constituent_subtractor = config['constituent_subtractor'] - + self.max_distance = constituent_subtractor['max_distance'] self.alpha = constituent_subtractor['alpha'] if 'max_eta' in constituent_subtractor: @@ -87,9 +87,14 @@ def initialize_config(self): self.bge_rho_grid_size 
= constituent_subtractor['bge_rho_grid_size'] self.max_pt_correct = constituent_subtractor['max_pt_correct'] self.ghost_area = constituent_subtractor['ghost_area'] + + # Optional flag to also create distributions without subtraction + self.include_no_subtraction = constituent_subtractor['include_no_subtraction'] if \ + 'include_no_subtraction' in constituent_subtractor else False + else: print('Constituent subtractor is disabled.') - + # Set reclustering algorithm (optional) if 'reclustering_algorithm' in config: self.recluster_alg = config['reclustering_algorithm'] @@ -102,18 +107,15 @@ def initialize_config(self): else: self.recluster_alg = 'CA' self.reclustering_algorithm = fj.cambridge_algorithm - - if 'm' in config: - self.m = config['m'] - else: - self.m = 0.1396 + + self.m = config['m'] if 'm' in config else 0.1396 #--------------------------------------------------------------- # Create thn and set as class attribute from name, dim # and lists of nbins, xmin, xmax. #--------------------------------------------------------------- def create_thn(self, name, title, dim, nbins, xmin, xmax): - + nbins_arr = (nbins) xmin_arr = (min) xmax_arr = (max) @@ -159,7 +161,7 @@ def lund_coordinates(self, jet_groomed_lund): # Compare two jets and store matching candidates in user_info #--------------------------------------------------------------- def set_matching_candidates(self, jet1, jet2, jetR, hname, fill_jet1_matches_only=False): - + # Fill histogram of matching distance of all candidates deltaR = jet1.delta_R(jet2) if hname: @@ -182,12 +184,12 @@ def set_jet_info(self, jet, jet_match, deltaR): jet_user_info = jet.python_info() else: jet_user_info = jet_info.JetInfo() - + jet_user_info.matching_candidates.append(jet_match) if deltaR < jet_user_info.closest_jet_deltaR: jet_user_info.closest_jet = jet_match jet_user_info.closest_jet_deltaR = deltaR - + jet.set_python_info(jet_user_info) #--------------------------------------------------------------- diff --git 
a/pyjetty/alice_analysis/process/base/process_io.py b/pyjetty/alice_analysis/process/base/process_io.py index 8e59d5f1f..1f9887540 100755 --- a/pyjetty/alice_analysis/process/base/process_io.py +++ b/pyjetty/alice_analysis/process/base/process_io.py @@ -3,7 +3,7 @@ """ Analysis IO class for jet analysis with track dataframe. Each instance of the class handles the IO of a *single* track tree. - + Authors: James Mulligan Mateusz Ploskon Ezra Lesser @@ -13,11 +13,15 @@ import os # for creating file on output import sys +import pathlib # for obtaining this file's directory +import yaml # to load tr. eff. uncertainty # Data analysis and plotting +import ROOT import uproot import pandas import numpy as np +from array import array # Fastjet via python (from external library fjpydev) import fastjet as fj @@ -28,7 +32,7 @@ ################################################################ class ProcessIO(common_base.CommonBase): - + #--------------------------------------------------------------- # Constructor #--------------------------------------------------------------- @@ -36,7 +40,7 @@ def __init__(self, input_file='', tree_dir='PWGHF_TreeCreator', track_tree_name='tree_Particle', event_tree_name='tree_event_char', output_dir='', is_pp=True, min_cent=0., max_cent=10., use_ev_id_ext=True, is_jetscape=False, holes=False, - event_plane_range=None, skip_event_tree=False, **kwargs): + event_plane_range=None, skip_event_tree=False, is_jewel=False, **kwargs): super(ProcessIO, self).__init__(**kwargs) self.input_file = input_file self.output_dir = output_dir @@ -48,6 +52,7 @@ def __init__(self, input_file='', tree_dir='PWGHF_TreeCreator', self.is_pp = is_pp self.use_ev_id_ext = use_ev_id_ext self.is_jetscape = is_jetscape + self.is_jewel = is_jewel self.holes = holes self.event_plane_range = event_plane_range self.skip_event_tree = skip_event_tree @@ -57,8 +62,8 @@ def __init__(self, input_file='', tree_dir='PWGHF_TreeCreator', # Set the combination of fields that give a unique 
event id self.unique_identifier = ['run_number', 'ev_id'] - if self.use_ev_id_ext: - self.unique_identifier += ['ev_id_ext'] + #if self.use_ev_id_ext: + # self.unique_identifier += ['ev_id_ext'] # Set relevant columns of event tree self.event_columns = self.unique_identifier + ['z_vtx_reco', 'is_ev_rej'] @@ -73,7 +78,8 @@ def __init__(self, input_file='', tree_dir='PWGHF_TreeCreator', self.track_columns = self.unique_identifier + ['ParticlePt', 'ParticleEta', 'ParticlePhi'] if is_jetscape: self.track_columns += ['status'] - + if is_jewel: + self.track_columns += ["Status"] #print(self) #--------------------------------------------------------------- @@ -91,16 +97,70 @@ def reset_dataframes(self): #--------------------------------------------------------------- def load_data(self, m=0.1396, reject_tracks_fraction=0., offset_indices=False, group_by_evid=True, random_mass=False, min_pt=0.): - + self.reject_tracks_fraction = reject_tracks_fraction + treff_bins, pT_edges = None, None # Only used when pT-based cut is used + try: + float(self.reject_tracks_fraction) + except ValueError: + if self.reject_tracks_fraction != "LHC17pq" and self.reject_tracks_fraction != "LHC18qr": + raise ValueError("reject_tracks_fraction = %s not implemented" % \ + self.reject_tracks_fraction) + + # For now if doing LHC17pq we are just doing a random 3% cut, so use simpler code + if self.reject_tracks_fraction == "LHC17pq": + #centrality = "0_100" + self.reject_tracks_fraction = 0.03 + + else: + # Assume Pb-Pb is 0-10% centrality (30-50% also available) + centrality = "0_10" + + # Load correct pT array from config file + treff_config = None + treff_yaml = os.path.join(str(pathlib.Path(__file__).parent.resolve()), + "TrackEfficiencyConfiguration.yaml") + with open(treff_yaml, 'r') as stream: + treff_config = yaml.safe_load(stream) + treff_bins = treff_config[self.reject_tracks_fraction][centrality] + pT_edges = treff_config["ptBinning"] + + # In the case of Pb-Pb, we want to apply an 
additional 2% track cut to account + # for the difference between the pp simulation and the actual Pb-Pb tr. eff. + + # P(A or B) = P(A) + P(B) - P(A and B) + # = 0.02 + (1 - val) - 0.02 * (1 - val) = 1 - 0.98 * val + # so 1 - P(A or B) = 1 - (1 - 0.98 * val) = 0.98 * val + treff_bins = [ 0.98 * val for val in treff_bins ] + self.reset_dataframes() print('Convert ROOT trees to pandas dataframes...') print(' track_tree_name = {}'.format(self.track_tree_name)) self.track_df = self.load_dataframe() - - if self.reject_tracks_fraction > 1e-3: + + if treff_bins != None: + # Apply pT-based track cut + print(" Removing tracks from %s using pT-based approach" % self.track_tree_name) + + # Get indices to delete + #print(" * Generating random numbers array...") + np.random.seed() + random_array = np.random.rand(len(self.track_df.index)) + print(" * Calculating correct bin edges and getting probabilities...") + probs = self.track_df["ParticlePt"].apply( + lambda x: treff_bins[np.searchsorted(pT_edges, x) - 1]) + print(" * Calculating indices to cut...") + indices_to_cut = self.track_df.index[np.greater(random_array, probs).astype(bool)] + + # Delete all tagged indices + print(" Removing %i of %i tracks from %s" % \ + (len(indices_to_cut), len(self.track_df.index), self.track_tree_name)) + self.track_df.drop(np.array(indices_to_cut), inplace=True) + + elif self.reject_tracks_fraction > 1e-3: + # Apply simple track cut n_remove = int(reject_tracks_fraction * len(self.track_df.index)) print(' Removing {} of {} tracks from {}'.format( n_remove, len(self.track_df.index), self.track_tree_name)) @@ -109,12 +169,13 @@ def load_data(self, m=0.1396, reject_tracks_fraction=0., offset_indices=False, self.track_df.drop(indices_remove, inplace=True) if random_mass: - print(' \033[93mRandomly assigning proton and kaon mass to some tracks.\033[0m') + print(' \033[93mRandomly assigning proton and kaon mass to some tracks.\033[0m') - df_fjparticles = self.group_fjparticles(m, offset_indices, 
group_by_evid, random_mass, min_pt=min_pt) + df_fjparticles = self.group_fjparticles( + m, offset_indices, group_by_evid, random_mass, min_pt=min_pt) return df_fjparticles - + #--------------------------------------------------------------- # Convert ROOT TTree to pandas dataframe # Return merged track+event dataframe from a given input file @@ -123,6 +184,11 @@ def load_data(self, m=0.1396, reject_tracks_fraction=0., offset_indices=False, #--------------------------------------------------------------- def load_dataframe(self): + self.skip_event_tree = True + self.tree_dir = "" + self.track_tree_name = "tree_Particle_gen_h_MPIon" + self.track_columns = self.unique_identifier + ['ParticlePx', 'ParticlePy', 'ParticlePz', 'ParticleE'] + # Load event tree into dataframe if not self.skip_event_tree: event_tree = None @@ -132,7 +198,7 @@ def load_dataframe(self): if not event_tree: raise ValueError("Tree %s not found in file %s" % (event_tree_name, self.input_file)) self.event_df_orig = uproot.concatenate(event_tree, self.event_columns, library="pd") - + # Check if there are duplicated event ids #print(self.event_df_orig) #d = self.event_df_orig.duplicated(self.unique_identifier, keep=False) @@ -141,7 +207,7 @@ def load_dataframe(self): if n_duplicates > 0: raise ValueError( "There appear to be %i duplicate events in the event dataframe" % n_duplicates) - + # Apply event selection self.event_df_orig.reset_index(drop=True) if self.is_pp: @@ -161,7 +227,7 @@ def load_dataframe(self): if not track_tree: raise ValueError("Tree %s not found in file %s" % (track_tree_name, self.input_file)) track_df_orig = uproot.concatenate(track_tree, self.track_columns, library="pd") - + # Apply hole selection, in case of jetscape if self.is_jetscape: if self.holes: @@ -170,7 +236,14 @@ def load_dataframe(self): track_criteria = 'status == 0' track_df_orig = track_df_orig.query(track_criteria) track_df_orig.reset_index(drop=True) - + + # JEWEL remove Status == 3 particles + elif 
self.is_jewel: + # Remove thermals (Status == 3) and ghosts (small pT) + track_criteria = 'Status != 3 and ParticlePt > 1e-5' + track_df_orig = track_df_orig.query(track_criteria) + track_df_orig.reset_index(drop=True) + # Check if there are duplicated tracks #print(track_df_orig) #d = track_df_orig.duplicated(self.track_columns, keep=False) @@ -185,15 +258,16 @@ def load_dataframe(self): self.track_df = track_df_orig else: self.track_df = pandas.merge(track_df_orig, event_df, on=self.unique_identifier) - + # Check if there are duplicated tracks in the merge dataframe #print(self.track_df) #d = self.track_df.duplicated(self.track_columns, keep=False) #print(self.track_df[d]) n_duplicates = sum(self.track_df.duplicated(self.track_columns)) if n_duplicates > 0: - sys.exit('ERROR: There appear to be {} duplicate particles in the merged dataframe'.format(n_duplicates)) - + raise ValueError( + 'There appear to be %i duplicate particles in the merged dataframe' % n_duplicates) + return self.track_df #--------------------------------------------------------------- @@ -201,81 +275,148 @@ def load_dataframe(self): # with the same formatting and saves to class's output_file. # histograms is list of tuples: [ ("title", np.histogram), ... 
] #--------------------------------------------------------------- - def save_dataframe(self, filename, df, df_true=False, histograms=[], is_jetscape=False): + def save_dataframe(self, filename, df, df_true=False, histograms=[], is_jetscape=False, is_jewel=False): # Create output directory if it does not already exist if not os.path.exists(self.output_dir): os.makedirs(self.output_dir) # Open output directory and (re)create rootfile - with uproot.recreate(self.output_dir + filename) as f: - - branchdict = {"run_number": int, "ev_id": int, "ParticlePt": float, - "ParticleEta": float, "ParticlePhi": float} + f = ROOT.TFile(self.output_dir + filename, "recreate") + f.cd() + + if df_true: + # Create tree with truth particle info + title = 'tree_Particle_gen' + print("Length of truth track tree: %i" % len(self.track_df)) + t = ROOT.TTree(title, title) + + # Initialize branches in TTree + run_number = None + ev_id = array('i', [-1]) + t.Branch("ev_id", ev_id, "ev_id/I") + ParticlePt = array('f', [-1.]) + t.Branch("ParticlePt", ParticlePt, "ParticlePt/F") + ParticleEta = array('f', [-1.]) + t.Branch("ParticleEta", ParticleEta, "ParticleEta/F") + ParticlePhi = array('f', [-1.]) + t.Branch("ParticlePhi", ParticlePhi, "ParticlePhi/F") + status = array('i', [-1]) if is_jetscape: - branchdict["status"] = int - - if df_true: - # Create tree with truth particle info - title = 'tree_Particle_gen' - print("Length of truth track tree: %i" % len(self.track_df)) - f.mktree(name=title, branch_types=branchdict, title=title) + t.Branch("status", status, "status/I") + if is_jewel: + t.Branch("Status", status, "Status/I") + run_number = array('f', [-1]) + t.Branch("run_number", run_number, "run_number/F") + else: + run_number = array('i', [-1]) + t.Branch("run_number", run_number, "run_number/I") + + for index, row in self.track_df.iterrows(): + ev_id[0] = int(row["ev_id"]) + ParticlePt[0] = row["ParticlePt"] + ParticleEta[0] = row["ParticleEta"] + ParticlePhi[0] = row["ParticlePhi"] if 
is_jetscape: - f[title].extend( { "run_number": self.track_df["run_number"], - "ev_id": self.track_df["ev_id"], - "ParticlePt": self.track_df["ParticlePt"], - "ParticleEta": self.track_df["ParticleEta"], - "ParticlePhi": self.track_df["ParticlePhi"], - "status": self.track_df["status"] } ) + status[0] = int(row["status"]) + if is_jewel: + run_number[0] = row["run_number"] + status[0] = int(row["Status"]) else: - f[title].extend( { "run_number": self.track_df["run_number"], - "ev_id": self.track_df["ev_id"], - "ParticlePt": self.track_df["ParticlePt"], - "ParticleEta": self.track_df["ParticleEta"], - "ParticlePhi": self.track_df["ParticlePhi"] } ) - - # Create tree with detector-level particle info - title = 'tree_Particle' - print("Length of detector-level track tree: %i" % len(df)) - f.mktree(name=title, branch_types=branchdict, title=title) + run_number[0] = int(row["run_number"]) + t.Fill() + t.Write() + + # Create tree with detector-level particle info + title = 'tree_Particle' + print("Length of detector-level track tree: %i" % len(df)) + f.cd() + t = ROOT.TTree(title, title) + + # Initialize branches in TTree + run_number = None + ev_id = array('i', [-1]) + t.Branch("ev_id", ev_id, "ev_id/I") + ParticlePt = array('f', [-1.]) + t.Branch("ParticlePt", ParticlePt, "ParticlePt/F") + ParticleEta = array('f', [-1.]) + t.Branch("ParticleEta", ParticleEta, "ParticleEta/F") + ParticlePhi = array('f', [-1.]) + t.Branch("ParticlePhi", ParticlePhi, "ParticlePhi/F") + status = array('i', [-1]) + if is_jetscape or is_jewel: + t.Branch("status", status, "status/I") + if is_jewel: + t.Branch("Status", status, "Status/I") + run_number = array('f', [-1]) + t.Branch("run_number", run_number, "run_number/F") + else: + run_number = array('i', [-1]) + t.Branch("run_number", run_number, "run_number/I") + + for index, row in df.iterrows(): + ev_id[0] = int(row["ev_id"]) + ParticlePt[0] = row["ParticlePt"] + ParticleEta[0] = row["ParticleEta"] + ParticlePhi[0] = row["ParticlePhi"] if 
is_jetscape: - f[title].extend( { "run_number": df["run_number"], - "ev_id": df["ev_id"], - "ParticlePt": df["ParticlePt"], - "ParticleEta": df["ParticleEta"], - "ParticlePhi": df["ParticlePhi"], - "status": df["status"] } ) + status[0] = int(row["status"]) + if is_jewel: + run_number[0] = row["run_number"] + status[0] = int(row["Status"]) else: - f[title].extend( { "run_number": df["run_number"], - "ev_id": df["ev_id"], - "ParticlePt": df["ParticlePt"], - "ParticleEta": df["ParticleEta"], - "ParticlePhi": df["ParticlePhi"] } ) - - # Create tree with event char - title = self.event_tree_name - branchdict = {"is_ev_rej": int, "run_number": int, "ev_id": int, "z_vtx_reco": float} - if is_jetscape: - branchdict["event_plane_angle"] = float - f.mktree(name=title, branch_types=branchdict, title=title) + run_number[0] = int(row["run_number"]) + t.Fill() + t.Write() + + # Create tree with event char + title = self.event_tree_name + f.cd() + t = ROOT.TTree(title, title) + + # Initialize branches in TTree + is_ev_rej = array('i', [-1]) + t.Branch("is_ev_rej", is_ev_rej, "is_ev_rej/I") + run_number = None + ev_id = array('i', [-1]) + t.Branch("ev_id", ev_id, "ev_id/I") + z_vtx_reco = array('f', [-1.]) + t.Branch("z_vtx_reco", z_vtx_reco, "z_vtx_reco/F") + event_plane_angle = array('f', [-1.]) + if is_jetscape: + t.Branch("event_plane_angle", event_plane_angle, "event_plane_angle/F") + if is_jewel: + run_number = array('f', [-1.]) + t.Branch("run_number", run_number, "run_number/F") + else: + run_number = array('i', [-1]) + t.Branch("run_number", run_number, "run_number/I") + + for index, row in self.event_df_orig.iterrows(): + is_ev_rej[0] = row["is_ev_rej"] + run_number[0] = row["run_number"] + ev_id[0] = int(row["ev_id"]) + z_vtx_reco[0] = row["z_vtx_reco"] if is_jetscape: - f[title].extend( {"is_ev_rej": self.event_df_orig["is_ev_rej"], - "run_number": self.event_df_orig["run_number"], - "ev_id": self.event_df_orig["ev_id"], - "z_vtx_reco": 
self.event_df_orig["z_vtx_reco"], - "event_plane_angle": self.event_df_orig["event_plane_angle"] } ) + event_plane_angle[0] = row["event_plane_angle"] + if is_jewel: + run_number[0] = row["run_number"] else: - f[title].extend( {"is_ev_rej": self.event_df_orig["is_ev_rej"], - "run_number": self.event_df_orig["run_number"], - "ev_id": self.event_df_orig["ev_id"], - "z_vtx_reco": self.event_df_orig["z_vtx_reco"] } ) - - # Write hNevents histogram: number of accepted events at detector level - f["hNevents"] = ( np.array([ 0, df["ev_id"].nunique() ]), np.array([ -0.5, 0.5, 1.5 ]) ) - - # Write histograms to file too, if any are passed - for title, h in histograms: - f[title] = h + run_number[0] = int(row["run_number"]) + t.Fill() + t.Write() + + # Write hNevents histogram: number of accepted events at detector level + f.cd() + hNevents = ROOT.TH1F("hNevents", "hNevents", 2, array('f', [-0.5, 0.5, 1.5]) ) + hNevents.Fill(1, df["ev_id"].nunique()) + hNevents.Write() + + # Write histograms to file too, if any are passed + for title, h in histograms: + h.Write(title) + + f.Close() #--------------------------------------------------------------- # Transform the track dataframe into a SeriesGroupBy object @@ -290,20 +431,20 @@ def group_fjparticles(self, m, offset_indices=False, group_by_evid=True, random_ # track_df_grouped is a DataFrameGroupBy object with one track dataframe per event track_df_grouped = None track_df_grouped = self.track_df.groupby(self.unique_identifier) - + # (ii) Transform the DataFrameGroupBy object to a SeriesGroupBy of fastjet particles df_fjparticles = None df_fjparticles = track_df_grouped.apply( - self.get_fjparticles, m=m, offset_indices=offset_indices, random_mass=random_mass, min_pt=min_pt) - + self.get_fjparticles, m, offset_indices, random_mass, min_pt) + else: print("Transform the track dataframe into a dataframe of fastjet particles per track...") # Transform into a DataFrame of fastjet particles - df = self.track_df - df_fjparticles = 
pandas.DataFrame( - {"run_number": df["run_number"], "ev_id": df["ev_id"], - "fj_particle": self.get_fjparticles(self.track_df, m, offset_indices, random_mass, min_pt=min_pt)} ) + df_fjparticles = pandas.DataFrame( + {"run_number": self.track_df["run_number"], "ev_id": self.track_df["ev_id"], + "fj_particle": self.get_fjparticles( + self.track_df, m, offset_indices, random_mass, min_pt)} ) return df_fjparticles @@ -311,16 +452,23 @@ def group_fjparticles(self, m, offset_indices=False, group_by_evid=True, random_ # Return fastjet:PseudoJets from a given track dataframe #--------------------------------------------------------------- def get_fjparticles(self, df_tracks, m, offset_indices=False, random_mass=False, min_pt=0.): - + # If offset_indices is true, then offset the user_index by a large negative value user_index_offset = 0 if offset_indices: user_index_offset = int(-1e6) - - # Apply a pt cut - df_tracks_accepted = df_tracks[df_tracks.ParticlePt > min_pt] - m_array = np.full((df_tracks_accepted['ParticlePt'].values.size), m) + # Apply a pT cut and make mass array + df_tracks_accepted, m_array = None, None + if 'ParticlePt' in df_tracks.columns: + df_tracks_accepted = df_tracks[df_tracks.ParticlePt > min_pt] + m_array = np.full((df_tracks_accepted['ParticlePt'].values.size), m) + elif 'ParticlePx' in df_tracks.columns: + df_tracks_accepted = df_tracks[(df_tracks.ParticlePx**2 + df_tracks.ParticlePy**2)**0.5 > min_pt] + #m_array = (df_tracks_accepted["ParticleE"]**2 - df_tracks_accepted["ParticlePx"]**2 - df_tracks_accepted["ParticlePy"]**2 - df_tracks_accepted["ParticlePz"]**2)**0.5 + m_array = np.full((df_tracks_accepted['ParticlePx'].values.size), m) + else: + raise ValueError("Neither ParticlePt nor ParticlePx detected in tracks dataframe") # Randomly assign K and p mass for systematic check if random_mass: @@ -338,8 +486,16 @@ def get_fjparticles(self, df_tracks, m, offset_indices=False, random_mass=False, m_array = np.where(rand_val > p_prob, p_mass, 
m_array) # Use swig'd function to create a vector of fastjet::PseudoJets from numpy arrays of pt,eta,phi - fj_particles = fjext.vectorize_pt_eta_phi_m( - df_tracks_accepted['ParticlePt'].values, df_tracks_accepted['ParticleEta'].values, - df_tracks_accepted['ParticlePhi'].values, m_array, user_index_offset) + if 'ParticlePt' in df_tracks_accepted.columns: + fj_particles = fjext.vectorize_pt_eta_phi_m( + df_tracks_accepted['ParticlePt'].values, df_tracks_accepted['ParticleEta'].values, + df_tracks_accepted['ParticlePhi'].values, m_array, user_index_offset) + elif 'ParticlePx' in df_tracks_accepted.columns: + #fj_particles = fjext.vectorize_px_py_pz_e( + # df_tracks_accepted['ParticlePx'].values, df_tracks_accepted['ParticlePy'].values, + # df_tracks_accepted['ParticlePz'].values, df_tracks_accepted['ParticleE'].values, user_index_offset) + fj_particles = fjext.vectorize_px_py_pz_m( + df_tracks_accepted['ParticlePx'].values, df_tracks_accepted['ParticlePy'].values, + df_tracks_accepted['ParticlePz'].values, m_array, user_index_offset) return fj_particles diff --git a/pyjetty/alice_analysis/process/base/process_io_parton_hadron.py b/pyjetty/alice_analysis/process/base/process_io_parton_hadron.py index cf63dbc28..ed92dc80f 100644 --- a/pyjetty/alice_analysis/process/base/process_io_parton_hadron.py +++ b/pyjetty/alice_analysis/process/base/process_io_parton_hadron.py @@ -33,7 +33,7 @@ def li_concat(li): ################################################################ class ProcessIO(common_base.CommonBase): - + #--------------------------------------------------------------- # Constructor # - level is either 'p' or 'h' (parton or hadron) @@ -63,7 +63,7 @@ def __init__(self, input_file='', tree_name_base='tree_Particle_gen', 'ParticlePx', 'ParticlePy', 'ParticlePz'] elif level == 'h': self.columns = ['run_number', 'ev_id', 'ParticleE', 'ParticlePx', - 'ParticlePy', 'ParticlePz', 'is_charged'] + 'ParticlePy', 'ParticlePz', 'is_charged'] else: raise ValueError("Particle 
level %s not recognized / use either 'p' or 'h'") @@ -191,10 +191,10 @@ def group_fjparticles(self, group_by_evid=True, ch_cut=False): # (i) Group the track dataframe by event # track_df_grouped is a DataFrameGroupBy object with one track dataframe per event track_df_grouped = track_df.groupby(self.unique_identifier) - + # (ii) Transform the DataFrameGroupBy object to a SeriesGroupBy of fastjet particles df_fjparticles = track_df_grouped.apply(self.get_fjparticles) - + # These are only useful for numpy implementation self.run_numbers = self.unique_ev_ids_per_run = None ''' @@ -219,12 +219,12 @@ def group_fjparticles(self, group_by_evid=True, ch_cut=False): else: # Transform the track dataframe into a dataframe of fastjet particles per track ''' Pandas implementation - df_fjparticles = pandas.DataFrame( - {"run_number": track_df["run_number"], "ev_id": track_df["ev_id"], + df_fjparticles = pandas.DataFrame( + {"run_number": track_df["run_number"], "ev_id": track_df["ev_id"], "fj_particle": self.get_fjparticles(track_df)} ) ''' df_fjparticles = { - "run_number": track_df["run_number"], "ev_id": track_df["ev_id"], + "run_number": track_df["run_number"], "ev_id": track_df["ev_id"], "fj_particle": fj_particles } #print(df_fjparticles) diff --git a/pyjetty/alice_analysis/process/base/process_utils.py b/pyjetty/alice_analysis/process/base/process_utils.py index f8b1599ee..b444773ca 100755 --- a/pyjetty/alice_analysis/process/base/process_utils.py +++ b/pyjetty/alice_analysis/process/base/process_utils.py @@ -3,7 +3,9 @@ """ Analysis utilities for jet analysis with track dataframe. 
- Author: James Mulligan (james.mulligan@berkeley.edu) + Authors: + James Mulligan (james.mulligan@berkeley.edu) + Ezra Lesser (elesser@berkeley.edu) """ from __future__ import print_function @@ -37,21 +39,35 @@ def __init__(self, **kwargs): #--------------------------------------------------------------- # Check if det-jet passes acceptance criteria #--------------------------------------------------------------- - def is_det_jet_accepted(self, jet_det): + def is_det_jet_accepted(self, jet_det, min_leading_track_pT=None): + + min_found = False for track in jet_det.constituents(): + if min_leading_track_pT and not min_found and track.pt() >= min_leading_track_pT: + min_found = True + if track.pt() > 100.: return False - return True + return min_found if min_leading_track_pT else True #--------------------------------------------------------------- # Check if truth-jet passes acceptance criteria #--------------------------------------------------------------- - def is_truth_jet_accepted(self, jet_truth): - - return self.is_det_jet_accepted(self, jet_truth) + def is_truth_jet_accepted(self, jet_truth, min_leading_track_pT=None): + + # If there is no leading track pT cut, accept all truth jets + if not min_leading_track_pT: + return True + + # Otherwise same def as is_det_jet_accepted, but without 100 GeV track check + for track in jet_truth.constituents(): + if track.pt() >= min_leading_track_pT: + return True + return False + #--------------------------------------------------------------- # Compute delta-R (eta-phi) between a PseudoJet and a given eta,phi value @@ -106,26 +122,28 @@ def leading_jet(self, jets): # the GroomerShop remains in scope. 
#--------------------------------------------------------------- def groom(self, gshop, grooming_setting, jetR): - + if 'sd' in grooming_setting: - + zcut = grooming_setting['sd'][0] beta = grooming_setting['sd'][1] return gshop.soft_drop(beta, zcut, jetR) elif 'dg' in grooming_setting: - + if len(gshop.jet().constituents()) < 2: return None - + a = grooming_setting['dg'][0] - + if a == 'max_pt_softer': return gshop.max_pt_softer() elif a == 'max_z': return gshop.max_z() elif a == 'max_kt': return gshop.max_kt() + elif len(a) > 6 and a[0:7] == 'late_kt': + return gshop.late_kt(float(a[8:])) elif a == 'max_kappa': return gshop.max_kappa() elif a == 'max_tf': @@ -134,9 +152,8 @@ def groom(self, gshop, grooming_setting, jetR): return gshop.min_tf() else: return gshop.dynamical(a) - + else: sys.exit('grooming_setting {} not recognized.'.format(grooming_setting)) - diff --git a/pyjetty/alice_analysis/process/user/ang_pp/ang_data.py b/pyjetty/alice_analysis/process/user/ang/ang_data.py similarity index 87% rename from pyjetty/alice_analysis/process/user/ang_pp/ang_data.py rename to pyjetty/alice_analysis/process/user/ang/ang_data.py index 6374ab666..ae98d5ac4 100755 --- a/pyjetty/alice_analysis/process/user/ang_pp/ang_data.py +++ b/pyjetty/alice_analysis/process/user/ang/ang_data.py @@ -91,7 +91,7 @@ def initialize_config(self): # Set configuration for analysis self.jetR_list = config["jetR"] - self.beta_list = config["betas"] + self.alpha_list = config["alphas"] self.n_pt_bins = config["n_pt_bins"] self.pt_limits = config["pt_limits"] @@ -118,7 +118,7 @@ def initialize_config(self): self.grooming_settings = [{'sd': [self.sd_zcut, self.sd_beta]}] # self.utils.grooming_settings self.grooming_labels = [self.utils.grooming_label(gs) for gs in self.grooming_settings] - # Configs for each jetR / beta + # Configs for each jetR / alpha #self.config_dict = config["ang"] #--------------------------------------------------------------- @@ -147,63 +147,63 @@ def 
initializeHistograms(self): setattr(self, name, h) ''' - for beta in self.beta_list: + for alpha in self.alpha_list: ''' for i, pTmin in list(enumerate(self.pTbins))[0:-1]: - # Individual angularity plots, \lambda_{\beta}^{\kappa} + # Individual angularity plots, \lambda_{\alpha}^{\kappa} pTmax = self.pTbins[i+1] name = "hLambda_pT%i-%i_%s" % (pTmin, pTmax, config) h = ROOT.TH1F(name, name, self.n_lambda_bins, 0, 1.0) - h.GetXaxis().SetTitle('#lambda_{%s}' % beta) - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % beta) + h.GetXaxis().SetTitle('#lambda_{%s}' % alpha) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % alpha) h.Sumw2() setattr(self, name, h) # Angularities with soft drop name = "hLambda_pT%i-%i_%s_SD" % (pTmin, pTmax, config) h = ROOT.TH1F(name, name, self.n_lambda_bins, 0, 1.0) - h.GetXaxis().SetTitle('#lambda_{%s}' % beta) - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % beta) + h.GetXaxis().SetTitle('#lambda_{%s}' % alpha) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % alpha) h.Sumw2() setattr(self, name, h) ''' # Lambda vs pT plots with fine binning - name = "h_ang_JetPt_R%s_%s" % (jetR, beta) + name = "h_ang_JetPt_R%s_%s" % (jetR, alpha) h = ROOT.TH2F(name, name, len(self.pt_bins_response)-1, self.pt_bins_response, len(self.obs_bins_response)-1, self.obs_bins_response) h.GetXaxis().SetTitle('#it{p}_{T,ch jet}') - h.GetYaxis().SetTitle('#it{#lambda}_{#it{#beta}=%s}' % beta) + h.GetYaxis().SetTitle('#it{#lambda}_{#it{#alpha}=%s}' % alpha) h.Sumw2() setattr(self, name, h) # Lambda vs rapidity plots with fine binning - name = "h_ang_JetRap_R%s_%s" % (jetR, beta) + name = "h_ang_JetRap_R%s_%s" % (jetR, alpha) h = ROOT.TH2F(name, name, self.n_rap_bins, self.rap_limits[0], self.rap_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{#eta}_{ch jet}') - h.GetYaxis().SetTitle('#it{#lambda}_{#it{#beta}=%s}' % beta) + h.GetYaxis().SetTitle('#it{#lambda}_{#it{#alpha}=%s}' % alpha) h.Sumw2() 
setattr(self, name, h) for gl in self.grooming_labels: # Lambda vs pT plots with fine binning -- with soft drop - name = "h_ang_JetPt_R%s_%s_%s" % (jetR, beta, gl) + name = "h_ang_JetPt_R%s_%s_%s" % (jetR, alpha, gl) h = ROOT.TH2F(name, name, len(self.pt_bins_response)-1, self.pt_bins_response, len(self.obs_bins_response)-1, self.obs_bins_response) h.GetXaxis().SetTitle('#it{p}_{T,ch jet}') - h.GetYaxis().SetTitle('#it{#lambda}_{#it{#beta}=%s}' % beta) + h.GetYaxis().SetTitle('#it{#lambda}_{#it{#alpha}=%s}' % alpha) h.Sumw2() setattr(self, name, h) # Lambda vs pT plots with fine binning -- with soft drop - name = "h_ang_JetRap_R%s_%s_%s" % (jetR, beta, gl) + name = "h_ang_JetRap_R%s_%s_%s" % (jetR, alpha, gl) h = ROOT.TH2F(name, name, self.n_rap_bins, self.rap_limits[0], self.rap_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{#eta}_{ch jet}') - h.GetYaxis().SetTitle('#it{#lambda}_{#it{#beta}=%s}' % beta) + h.GetYaxis().SetTitle('#it{#lambda}_{#it{#alpha}=%s}' % alpha) h.Sumw2() setattr(self, name, h) @@ -223,16 +223,16 @@ def analyzeEvents(self): print('jet definition is:', jet_def) print('jet selector is:', jet_selector,'\n') - for beta in self.beta_list: + for alpha in self.alpha_list: for fj_particles in self.df_fjparticles: # do jet-finding and fill histograms - self.analyzeJets(fj_particles, jet_def, jet_selector, beta) + self.analyzeJets(fj_particles, jet_def, jet_selector, alpha) #--------------------------------------------------------------- # Analyze jets of a given event. # fj_particles is the list of fastjet pseudojets for a single fixed event. 
#--------------------------------------------------------------- - def analyzeJets(self, fj_particles, jet_def, jet_selector, beta): + def analyzeJets(self, fj_particles, jet_def, jet_selector, alpha): # Do jet finding cs = fj.ClusterSequence(fj_particles, jet_def) @@ -252,20 +252,20 @@ def analyzeJets(self, fj_particles, jet_def, jet_selector, beta): continue # Fill histograms - self.fillJetHistograms(jet, jetR, beta) + self.fillJetHistograms(jet, jetR, alpha) #--------------------------------------------------------------- # Fill jet histograms #--------------------------------------------------------------- - def fillJetHistograms(self, jet, jetR, beta): + def fillJetHistograms(self, jet, jetR, alpha): kappa = 1 - l = fjext.lambda_beta_kappa(jet, beta, kappa, jetR) + l = fjext.lambda_beta_kappa(jet, alpha, kappa, jetR) # Fill plots - getattr(self, "h_ang_JetPt_R%s_%s" % (jetR, beta)).Fill(jet.pt(), l) - getattr(self, "h_ang_JetRap_R%s_%s" % (jetR, beta)).Fill(jet.rap(), l) + getattr(self, "h_ang_JetPt_R%s_%s" % (jetR, alpha)).Fill(jet.pt(), l) + getattr(self, "h_ang_JetRap_R%s_%s" % (jetR, alpha)).Fill(jet.rap(), l) getattr(self, "hJetPt_R%s" % str(jetR)).Fill(jet.pt()) #getattr(self, "hM_JetPt_R%s" % str(jetR)).Fill(jet_pt, jet.m()) @@ -275,10 +275,10 @@ def fillJetHistograms(self, jet, jetR, beta): gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) jet_sd = self.utils.groom(gshop, gs, jetR).pair() - l_sd = fjext.lambda_beta_kappa(jet, jet_sd, beta, kappa, jetR) + l_sd = fjext.lambda_alpha_kappa(jet, jet_sd, alpha, kappa, jetR) # Should fill histograms using the ungroomed jet pT & rapidity - getattr(self, ("h_ang_JetPt_R%s_%s_%s" % (jetR, beta, gl))).Fill(jet.pt(), l_sd) - getattr(self, ("h_ang_JetRap_R%s_%s_%s" % (jetR, beta, gl))).Fill(jet.rap(), l_sd) + getattr(self, ("h_ang_JetPt_R%s_%s_%s" % (jetR, alpha, gl))).Fill(jet.pt(), l_sd) + getattr(self, ("h_ang_JetRap_R%s_%s_%s" % (jetR, alpha, gl))).Fill(jet.rap(), l_sd) 
################################################################## diff --git a/pyjetty/alice_analysis/process/user/ang_pp/ang_fs.py b/pyjetty/alice_analysis/process/user/ang/ang_fs.py similarity index 88% rename from pyjetty/alice_analysis/process/user/ang_pp/ang_fs.py rename to pyjetty/alice_analysis/process/user/ang/ang_fs.py index 39cce0be6..7f5219816 100755 --- a/pyjetty/alice_analysis/process/user/ang_pp/ang_fs.py +++ b/pyjetty/alice_analysis/process/user/ang/ang_fs.py @@ -133,7 +133,7 @@ def initialize_config(self): # Set configuration for analysis self.jetR_list = config['jetR'] - self.beta_list = config['betas'] + self.alpha_list = config['alphas'] self.n_pt_bins = config["n_pt_bins"] self.pt_limits = config["pt_limits"] @@ -199,14 +199,14 @@ def initializeHistograms(self): h.Sumw2() setattr(self, name, h) - for beta in self.beta_list: + for alpha in self.alpha_list: - label = "R%s_%s" % (str(jetR), str(beta)) + label = "R%s_%s" % (str(jetR), str(alpha)) name = 'hResponse_ang_%s' % label h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#it{#lambda}_{%s,det}' % beta) - h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru}' % beta) + h.GetXaxis().SetTitle('#it{#lambda}_{%s,det}' % alpha) + h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru}' % alpha) h.Sumw2() setattr(self, name, h) @@ -214,7 +214,7 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,det}^{ch jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s}}' % str(alpha)) h.Sumw2() setattr(self, name, h) @@ -222,26 +222,26 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,tru}^{ch 
jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s}}' % str(alpha)) h.Sumw2() setattr(self, name, h) ''' for i, pTmin in list(enumerate(self.pTbins))[0:-1]: - # Angularities, \lambda_{\beta}^{\kappa} + # Angularities, \lambda_{\alpha}^{\kappa} pTmax = self.pTbins[i+1] - name = ("hLambda_pT%i-%i_R%s_B%s_mcdet" % (pTmin, pTmax, jetR, beta)).replace('.', '') + name = ("hLambda_pT%i-%i_R%s_B%s_mcdet" % (pTmin, pTmax, jetR, alpha)).replace('.', '') h = ROOT.TH1F(name, name, self.n_lambda_bins, 0, 1.0) - h.GetXaxis().SetTitle('#lambda_{%s}' % beta) - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % beta) + h.GetXaxis().SetTitle('#lambda_{%s}' % alpha) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % alpha) setattr(self, name, h) # Angularities with soft drop - name = ("hLambda_pT%i-%i_R%s_B%s_mcdet_SD" % (pTmin, pTmax, jetR, beta)).replace('.', '') + name = ("hLambda_pT%i-%i_R%s_B%s_mcdet_SD" % (pTmin, pTmax, jetR, alpha)).replace('.', '') h = ROOT.TH1F(name, name, self.n_lambda_bins, 0, 1.0) - h.GetXaxis().SetTitle('#lambda_{%s}' % beta) - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % beta) + h.GetXaxis().SetTitle('#lambda_{%s}' % alpha) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % alpha) setattr(self, name, h) ''' @@ -249,14 +249,14 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 2.) 
h.GetXaxis().SetTitle('#it{p}_{T,truth}^{ch jet}') h.GetYaxis().SetTitle( - '#frac{#it{#lambda}_{#it{#beta},det}-#it{#lambda}_{#it{#beta},truth}}{#it{#lambda}_{#it{#beta},truth}}') + '#frac{#it{#lambda}_{#it{#alpha},det}-#it{#lambda}_{#it{#alpha},truth}}{#it{#lambda}_{#it{#alpha},truth}}') h.Sumw2() setattr(self, name, h) # Create THn of response dim = 4 title = ['#it{p}_{T,det}^{ch jet}', '#it{p}_{T,truth}^{ch jet}', - '#it{#lambda}_{#it{#beta},det}', '#it{#lambda}_{#it{#beta},truth}'] + '#it{#lambda}_{#it{#alpha},det}', '#it{#lambda}_{#it{#alpha},truth}'] pt_bins = array('d', list(range(5, 100, 5)) + list(range(100, 210, 10))) obs_bins = np.concatenate((np.linspace(0, 0.009, 10), np.linspace(0.01, 0.1, 19), np.linspace(0.11, 0.8, 70))) @@ -285,8 +285,8 @@ def initializeHistograms(self): for gl in self.grooming_labels: name = 'hResponse_ang_%s_%s' % (label, gl) h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#it{#lambda}_{%s,det,SD}' % beta) - h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru,SD}' % beta) + h.GetXaxis().SetTitle('#it{#lambda}_{%s,det,SD}' % alpha) + h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru,SD}' % alpha) h.Sumw2() setattr(self, name, h) @@ -294,7 +294,7 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,det,SD}^{ch jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s,SD}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s,SD}}' % str(alpha)) h.Sumw2() setattr(self, name, h) @@ -302,7 +302,7 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,tru,SD}^{ch jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s,SD}}' % str(beta)) + 
h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s,SD}}' % str(alpha)) h.Sumw2() setattr(self, name, h) @@ -403,8 +403,8 @@ def analyze_jets(self, fj_particles_det, fj_particles_truth, jet_def, jet_select if self.debug_level > 1: print('event rejected due to jet acceptance') return - - #self.fill_det_before_matching(jet_det, jetR) + + self.fill_det_before_matching(jet_det, jetR) # Fill truth-level jet histograms (before matching) for jet_truth in jets_truth_selected: @@ -420,7 +420,7 @@ def analyze_jets(self, fj_particles_det, fj_particles_truth, jet_def, jet_select hname_QA = 'hJetMatchingQA_R%s' % str(jetR).replace('.', '') for jet_det in jets_det_selected: self.set_matches_pp(jet_det, hname_QA) - + # Fill matching histograms if jet_det.has_user_info(): jet_truth = jet_det.python_info().match @@ -428,8 +428,8 @@ def analyze_jets(self, fj_particles_det, fj_particles_truth, jet_def, jet_select self.fill_matching_histograms(jet_det, jet_truth, jetR) # Fill response matrices for each subobservable - for beta in self.beta_list: - self.fill_response_histograms(jet_det, jet_truth, jetR, beta) + for alpha in self.alpha_list: + self.fill_response_histograms(jet_det, jet_truth, jetR, alpha) ''' #--------------------------------------------------------------- @@ -462,15 +462,40 @@ def fill_truth_before_matching(self, jet, jetR): getattr(self, 'hJetPt_Truth_R%s' % str(jetR).replace('.', '')).Fill(jet.pt()) getattr(self, 'hJetPt_N_R%s' % str(jetR).replace('.', '')).Fill(jet.pt(), len(jet.constituents())) + kappa = 1 + for alpha in self.alpha_list: + label = "R%s_%s" % (str(jetR), str(alpha)) + + l = fjext.lambda_beta_kappa(jet, alpha, kappa, jetR) + getattr(self, 'hAng_JetPt_tru_%s' % label).Fill(jet.pt(), l) + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_sd = self.utils.groom(gshop, gs, jetR).pair() + l_sd = fjext.lambda_beta_kappa(jet, jet_sd, alpha, 
kappa, jetR) + getattr(self, 'hAng_JetPt_tru_%s_%s' % (label, gl)).Fill(jet.pt(), l_sd) - ''' #--------------------------------------------------------------- # Fill det jet histograms #--------------------------------------------------------------- def fill_det_before_matching(self, jet, jetR): - # Implement here - pass - ''' + + kappa = 1 + for alpha in self.alpha_list: + label = "R%s_%s" % (str(jetR), str(alpha)) + + l = fjext.lambda_beta_kappa(jet, alpha, kappa, jetR) + getattr(self, 'hAng_JetPt_det_%s' % label).Fill(jet.pt(), l) + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_sd = self.utils.groom(gshop, gs, jetR).pair() + l_sd = fjext.lambda_beta_kappa(jet, jet_sd, alpha, kappa, jetR) + getattr(self, 'hAng_JetPt_det_%s_%s' % (label, gl)).Fill(jet.pt(), l_sd) #--------------------------------------------------------------- # Loop through jets and fill matching histos @@ -488,7 +513,7 @@ def fill_matching_histograms(self, jet_det, jet_truth, jetR): #--------------------------------------------------------------- # Fill response histograms #--------------------------------------------------------------- - def fill_response_histograms(self, jet_det, jet_tru, jetR, beta): + def fill_response_histograms(self, jet_det, jet_tru, jetR, alpha): # Ungroomed jet pT jet_pt_det = jet_det.pt() @@ -496,10 +521,10 @@ def fill_response_histograms(self, jet_det, jet_tru, jetR, beta): # just use kappa = 1 for now kappa = 1 - l_det = fjext.lambda_beta_kappa(jet_det, beta, kappa, jetR) - l_tru = fjext.lambda_beta_kappa(jet_tru, beta, kappa, jetR) + l_det = fjext.lambda_beta_kappa(jet_det, alpha, kappa, jetR) + l_tru = fjext.lambda_beta_kappa(jet_tru, alpha, kappa, jetR) - label = "R%s_%s" % (str(jetR), str(beta)) + label = "R%s_%s" % (str(jetR), str(alpha)) ''' Histograms per pT bin (currently unnecessary and cause clutter) (pTmin, pTmax) = pT_bin(jet_det.pt(), 
self.pTbins) @@ -516,8 +541,8 @@ def fill_response_histograms(self, jet_det, jet_tru, jetR, beta): getattr(self, 'hAngResidual_JetPt_%s' % label).Fill(jet_pt_tru, lambda_resolution) # Observable plots - getattr(self, 'hAng_JetPt_det_%s' % label).Fill(jet_pt_det, l_det) - getattr(self, 'hAng_JetPt_tru_%s' % label).Fill(jet_pt_tru, l_tru) + #getattr(self, 'hAng_JetPt_det_%s' % label).Fill(jet_pt_det, l_det) + #getattr(self, 'hAng_JetPt_tru_%s' % label).Fill(jet_pt_tru, l_tru) getattr(self, 'hResponse_ang_%s' % label).Fill(l_det, l_tru) x = ([jet_pt_det, jet_pt_tru, l_det, l_tru]) @@ -534,12 +559,12 @@ def fill_response_histograms(self, jet_det, jet_tru, jetR, beta): jet_sd_tru = self.utils.groom(gshop_tru, gs, jetR).pair() # lambda for soft drop jet - l_sd_det = fjext.lambda_beta_kappa(jet_det, jet_sd_det, beta, kappa, jetR) - l_sd_tru = fjext.lambda_beta_kappa(jet_tru, jet_sd_tru, beta, kappa, jetR) + l_sd_det = fjext.lambda_beta_kappa(jet_det, jet_sd_det, alpha, kappa, jetR) + l_sd_tru = fjext.lambda_beta_kappa(jet_tru, jet_sd_tru, alpha, kappa, jetR) # Should fill histograms using the ungroomed jet pT - getattr(self, 'hAng_JetPt_det_%s_%s' % (label, gl)).Fill(jet_pt_det, l_sd_det) - getattr(self, 'hAng_JetPt_tru_%s_%s' % (label, gl)).Fill(jet_pt_tru, l_sd_tru) + #getattr(self, 'hAng_JetPt_det_%s_%s' % (label, gl)).Fill(jet_pt_det, l_sd_det) + #getattr(self, 'hAng_JetPt_tru_%s_%s' % (label, gl)).Fill(jet_pt_tru, l_sd_tru) getattr(self, 'hResponse_ang_%s_%s' % (label, gl)).Fill(l_sd_det, l_sd_tru) x = ([jet_pt_det, jet_pt_tru, l_sd_det, l_sd_tru]) x_array = array('d', x) diff --git a/pyjetty/alice_analysis/process/user/ang_pp/ang_mc.py b/pyjetty/alice_analysis/process/user/ang/ang_mc.py similarity index 93% rename from pyjetty/alice_analysis/process/user/ang_pp/ang_mc.py rename to pyjetty/alice_analysis/process/user/ang/ang_mc.py index 3394f7fa0..6a3511291 100755 --- a/pyjetty/alice_analysis/process/user/ang_pp/ang_mc.py +++ 
b/pyjetty/alice_analysis/process/user/ang/ang_mc.py @@ -132,7 +132,7 @@ def initialize_config(self): # Set configuration for analysis self.jetR_list = config['jetR'] - self.beta_list = config['betas'] + self.alpha_list = config['alphas'] self.n_pt_bins = config["n_pt_bins"] self.pt_limits = config["pt_limits"] @@ -198,14 +198,14 @@ def initializeHistograms(self): h.Sumw2() setattr(self, name, h) - for beta in self.beta_list: + for alpha in self.alpha_list: - label = "R%s_%s" % (str(jetR), str(beta)) + label = "R%s_%s" % (str(jetR), str(alpha)) name = 'hResponse_ang_%s' % label h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#it{#lambda}_{%s,det}' % beta) - h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru}' % beta) + h.GetXaxis().SetTitle('#it{#lambda}_{%s,det}' % alpha) + h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru}' % alpha) h.Sumw2() setattr(self, name, h) @@ -213,7 +213,7 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,det}^{ch jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s}}' % str(alpha)) h.Sumw2() setattr(self, name, h) @@ -221,26 +221,26 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,tru}^{ch jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s}}' % str(alpha)) h.Sumw2() setattr(self, name, h) ''' for i, pTmin in list(enumerate(self.pTbins))[0:-1]: - # Angularities, \lambda_{\beta}^{\kappa} + # Angularities, \lambda_{\alpha}^{\kappa} pTmax = self.pTbins[i+1] - name = ("hLambda_pT%i-%i_R%s_B%s_mcdet" % (pTmin, pTmax, jetR, 
beta)).replace('.', '') + name = ("hLambda_pT%i-%i_R%s_B%s_mcdet" % (pTmin, pTmax, jetR, alpha)).replace('.', '') h = ROOT.TH1F(name, name, self.n_lambda_bins, 0, 1.0) - h.GetXaxis().SetTitle('#lambda_{%s}' % beta) - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % beta) + h.GetXaxis().SetTitle('#lambda_{%s}' % alpha) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % alpha) setattr(self, name, h) # Angularities with soft drop - name = ("hLambda_pT%i-%i_R%s_B%s_mcdet_SD" % (pTmin, pTmax, jetR, beta)).replace('.', '') + name = ("hLambda_pT%i-%i_R%s_B%s_mcdet_SD" % (pTmin, pTmax, jetR, alpha)).replace('.', '') h = ROOT.TH1F(name, name, self.n_lambda_bins, 0, 1.0) - h.GetXaxis().SetTitle('#lambda_{%s}' % beta) - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % beta) + h.GetXaxis().SetTitle('#lambda_{%s}' % alpha) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{%s}}' % alpha) setattr(self, name, h) ''' @@ -248,14 +248,14 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 2.) 
h.GetXaxis().SetTitle('#it{p}_{T,truth}^{ch jet}') h.GetYaxis().SetTitle( - '#frac{#it{#lambda}_{#it{#beta},det}-#it{#lambda}_{#it{#beta},truth}}{#it{#lambda}_{#it{#beta},truth}}') + '#frac{#it{#lambda}_{#it{#alpha},det}-#it{#lambda}_{#it{#alpha},truth}}{#it{#lambda}_{#it{#alpha},truth}}') h.Sumw2() setattr(self, name, h) # Create THn of response dim = 4 title = ['#it{p}_{T,det}^{ch jet}', '#it{p}_{T,truth}^{ch jet}', - '#it{#lambda}_{#it{#beta},det}', '#it{#lambda}_{#it{#beta},truth}'] + '#it{#lambda}_{#it{#alpha},det}', '#it{#lambda}_{#it{#alpha},truth}'] pt_bins = array('d', list(range(5, 100, 5)) + list(range(100, 210, 10))) obs_bins = np.concatenate((np.linspace(0, 0.009, 10), np.linspace(0.01, 0.1, 19), np.linspace(0.11, 0.8, 70))) @@ -284,8 +284,8 @@ def initializeHistograms(self): for gl in self.grooming_labels: name = 'hResponse_ang_%s_%s' % (label, gl) h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#it{#lambda}_{%s,det,SD}' % beta) - h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru,SD}' % beta) + h.GetXaxis().SetTitle('#it{#lambda}_{%s,det,SD}' % alpha) + h.GetYaxis().SetTitle('#it{#lambda}_{%s,tru,SD}' % alpha) h.Sumw2() setattr(self, name, h) @@ -293,7 +293,7 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,det,SD}^{ch jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s,SD}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{det,%s,SD}}' % str(alpha)) h.Sumw2() setattr(self, name, h) @@ -301,7 +301,7 @@ def initializeHistograms(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('#it{p}_{T,tru,SD}^{ch jet}') - h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s,SD}}' % str(beta)) + 
h.GetYaxis().SetTitle('#frac{d#it{N}}{d#it{#lambda}_{tru,%s,SD}}' % str(alpha)) h.Sumw2() setattr(self, name, h) @@ -427,8 +427,8 @@ def analyze_jets(self, fj_particles_det, fj_particles_truth, jet_def, jet_select self.fill_matching_histograms(jet_det, jet_truth, jetR) # Fill response matrices for each subobservable - for beta in self.beta_list: - self.fill_response_histograms(jet_det, jet_truth, jetR, beta) + for alpha in self.alpha_list: + self.fill_response_histograms(jet_det, jet_truth, jetR, alpha) ''' #--------------------------------------------------------------- @@ -487,7 +487,7 @@ def fill_matching_histograms(self, jet_det, jet_truth, jetR): #--------------------------------------------------------------- # Fill response histograms #--------------------------------------------------------------- - def fill_response_histograms(self, jet_det, jet_tru, jetR, beta): + def fill_response_histograms(self, jet_det, jet_tru, jetR, alpha): # Ungroomed jet pT jet_pt_det = jet_det.pt() @@ -495,10 +495,10 @@ def fill_response_histograms(self, jet_det, jet_tru, jetR, beta): # just use kappa = 1 for now kappa = 1 - l_det = fjext.lambda_beta_kappa(jet_det, beta, kappa, jetR) - l_tru = fjext.lambda_beta_kappa(jet_tru, beta, kappa, jetR) + l_det = fjext.lambda_beta_kappa(jet_det, alpha, kappa, jetR) + l_tru = fjext.lambda_beta_kappa(jet_tru, alpha, kappa, jetR) - label = "R%s_%s" % (str(jetR), str(beta)) + label = "R%s_%s" % (str(jetR), str(alpha)) ''' Histograms per pT bin (currently unnecessary and cause clutter) (pTmin, pTmax) = pT_bin(jet_det.pt(), self.pTbins) @@ -533,8 +533,8 @@ def fill_response_histograms(self, jet_det, jet_tru, jetR, beta): jet_sd_tru = self.utils.groom(gshop_tru, gs, jetR).pair() # lambda for soft drop jet - l_sd_det = fjext.lambda_beta_kappa(jet_det, jet_sd_det, beta, kappa, jetR) - l_sd_tru = fjext.lambda_beta_kappa(jet_tru, jet_sd_tru, beta, kappa, jetR) + l_sd_det = fjext.lambda_beta_kappa(jet_det, jet_sd_det, alpha, kappa, jetR) + l_sd_tru = 
fjext.lambda_beta_kappa(jet_tru, jet_sd_tru, alpha, kappa, jetR) # Should fill histograms using the ungroomed jet pT getattr(self, 'hAng_JetPt_det_%s_%s' % (label, gl)).Fill(jet_pt_det, l_sd_det) diff --git a/pyjetty/alice_analysis/process/user/ang_pp/ang_pyth.py b/pyjetty/alice_analysis/process/user/ang/ang_pyth.py similarity index 90% rename from pyjetty/alice_analysis/process/user/ang_pp/ang_pyth.py rename to pyjetty/alice_analysis/process/user/ang/ang_pyth.py index 946d8c47f..d1b471340 100644 --- a/pyjetty/alice_analysis/process/user/ang_pp/ang_pyth.py +++ b/pyjetty/alice_analysis/process/user/ang/ang_pyth.py @@ -67,7 +67,7 @@ def process_ang_data(self): inf_MPIon = self.input_dir + "PythiaResults_R%s.root" % str(jetR) inf_MPIoff = self.input_dir + "PythiaResults_R%s_MPIoff.root" % str(jetR) io = process_io_pyth.ProcessIO(input_file_MPIoff=inf_MPIoff, input_file_MPIon=inf_MPIon, - mergebetween=False, betas=self.beta_list) + mergebetween=False, alphas=self.alpha_list) self.df_ang_jets = io.load_data() print('--- {} seconds ---'.format(time.time() - start_time)) @@ -95,7 +95,7 @@ def initialize_config(self): # Set configuration for analysis self.jetR_list = config["jetR"] - self.beta_list = config["betas"] + self.alpha_list = config["alphas"] self.n_pt_bins = config["n_pt_bins"] self.pt_limits = config["pt_limits"] @@ -133,41 +133,41 @@ def initializeHistograms(self): h.GetYaxis().SetTitle('p_{T}^{jet, ch}') setattr(self, name, h) - for beta in self.beta_list: + for alpha in self.alpha_list: - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') + label = ("R%s_%s" % (str(jetR), str(alpha))).replace('.', '') name = 'hResponse_ang_p_%s' % label h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) + h.GetXaxis().SetTitle('#lambda_{#alpha=%s}^{parton}' % alpha) + h.GetYaxis().SetTitle('#lambda_{#alpha=%s}^{ch}' % alpha) setattr(self, name, h) 
name = 'hAng_JetPt_ch_%s' % label h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('p_{T}^{jet, ch}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{ch}}' % str(alpha)) setattr(self, name, h) name = 'hAng_JetPt_p_%s' % label h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{parton}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{parton}}' % str(alpha)) setattr(self, name, h) name = "hAngResidual_JetPt_%s" % label h = ROOT.TH2F(name, name, 300, 0, 300, 200, -2., 2.) h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - h.GetYaxis().SetTitle('#frac{#lambda_{#beta}^{jet, parton}-#lambda_{#beta}' + \ - '^{jet, ch}}{#lambda_{#beta}^{jet, parton}}') + h.GetYaxis().SetTitle('#frac{#lambda_{#alpha}^{jet, parton}-#lambda_{#alpha}' + \ + '^{jet, ch}}{#lambda_{#alpha}^{jet, parton}}') setattr(self, name, h) # Create THn of response dim = 4 title = ['p_{T}^{jet, parton}', 'p_{T}^{jet, ch}', - '#lambda_{#beta}^{parton}', '#lambda_{#beta}^{ch}'] + '#lambda_{#alpha}^{parton}', '#lambda_{#alpha}^{ch}'] nbins = [10, 20, 100, 100] min_li = [0., 0., 0., 0.] 
max_li = [100., 200., 1.0, 1.0] @@ -198,9 +198,9 @@ def analyzeJets(self, jetR): self.fillJetHistograms(ang_jets, jetR) - for beta in self.beta_list: + for alpha in self.alpha_list: - self.fillRMs(ang_jets, jetR, beta) + self.fillRMs(ang_jets, jetR, alpha) #--------------------------------------------------------------- # Fill jet histograms for each value of R @@ -222,11 +222,11 @@ def fillJetHistograms(self, ang_jets, jetR): #--------------------------------------------------------------- # Fill jet response matrices for each value of R #--------------------------------------------------------------- - def fillRMs(self, ang_jets, jetR, beta): + def fillRMs(self, ang_jets, jetR, alpha): - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') + label = ("R%s_%s" % (str(jetR), str(alpha))).replace('.', '') - b = str(beta).replace('.', '') + b = str(alpha).replace('.', '') l_p = ang_jets["l_p_%s" % b] l_ch = ang_jets["l_ch_%s" % b] diff --git a/pyjetty/alice_analysis/process/user/ang_pp/helpers.py b/pyjetty/alice_analysis/process/user/ang/helpers.py similarity index 94% rename from pyjetty/alice_analysis/process/user/ang_pp/helpers.py rename to pyjetty/alice_analysis/process/user/ang/helpers.py index 502423484..1f1578ab0 100644 --- a/pyjetty/alice_analysis/process/user/ang_pp/helpers.py +++ b/pyjetty/alice_analysis/process/user/ang/helpers.py @@ -30,8 +30,8 @@ def deltaR(pjet1, pjet2): # for constit in jet.constituents() ] ) # Return angularity for single fastjet.PseudoJet object with no constituents -def lambda_beta_kappa_i(constit, jet, jetR, beta, kappa): - return (constit.pt() / jet.pt())**kappa * (jet.delta_R(constit) / jetR)**beta +def lambda_alpha_kappa_i(constit, jet, jetR, alpha, kappa): + return (constit.pt() / jet.pt())**kappa * (jet.delta_R(constit) / jetR)**alpha # Helper function for finding the correct jet pT bin def pT_bin(jet_pT, pTbins): diff --git a/pyjetty/alice_analysis/process/user/ang_pp/herwig_parton_hadron.py 
b/pyjetty/alice_analysis/process/user/ang/herwig_parton_hadron.py similarity index 92% rename from pyjetty/alice_analysis/process/user/ang_pp/herwig_parton_hadron.py rename to pyjetty/alice_analysis/process/user/ang/herwig_parton_hadron.py index 9862c429f..b15ea92c2 100644 --- a/pyjetty/alice_analysis/process/user/ang_pp/herwig_parton_hadron.py +++ b/pyjetty/alice_analysis/process/user/ang/herwig_parton_hadron.py @@ -17,7 +17,7 @@ from pyjetty.mputils import * from pyjetty.alice_analysis.process.base import process_base -from pyjetty.alice_analysis.process.user.ang_pp.helpers import lambda_beta_kappa_i +from pyjetty.alice_analysis.process.user.ang_pp.helpers import lambda_alpha_kappa_i from array import array import numpy as np @@ -64,7 +64,7 @@ def initialize_config(self, args): self.level = args.no_match_level self.jetR_list = config["jetR"] - self.beta_list = config["betas"] + self.alpha_list = config["alphas"] # SoftDrop parameters self.use_SD = True # Change this to use SD @@ -241,16 +241,16 @@ def initialize_hist(self): getattr(self, hist_list_name).append(h) ''' - for beta in self.beta_list: + for alpha in self.alpha_list: - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') + label = ("R%s_%s" % (str(jetR), str(alpha))).replace('.', '') if self.level in [None, 'ch']: name = 'hAng_JetPt_ch_%sScaled' % label h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{ch}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -259,7 +259,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) + 
h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{ch}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name_MPIon).append(h) @@ -271,7 +271,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{ch}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -280,7 +280,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{ch}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name_MPIon).append(h) @@ -290,7 +290,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{jet, h}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{h}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{h}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -301,7 +301,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{jet, h}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{h}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{h}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -312,7 +312,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - 
h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{parton}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{parton}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -323,7 +323,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, len(self.obs_bins)-1, self.obs_bins) h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{parton}}' % str(beta)) + h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#alpha=%s}^{parton}}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -331,8 +331,8 @@ def initialize_hist(self): if self.level == None: name = 'hResponse_ang_%sScaled' % label h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) + h.GetXaxis().SetTitle('#lambda_{#alpha=%s}^{parton}' % alpha) + h.GetYaxis().SetTitle('#lambda_{#alpha=%s}^{ch}' % alpha) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -341,8 +341,8 @@ def initialize_hist(self): for gl in self.grooming_labels: name = 'hResponse_ang_%s_%sScaled' % (label, gl) h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) + h.GetXaxis().SetTitle('#lambda_{#alpha=%s}^{parton}' % alpha) + h.GetYaxis().SetTitle('#lambda_{#alpha=%s}^{ch}' % alpha) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -350,24 +350,24 @@ def initialize_hist(self): ''' name = 'hResponse_ang_PtBinCH20-40_%sScaled' % label h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) + h.GetXaxis().SetTitle('#lambda_{#alpha=%s}^{parton}' % alpha) + 
h.GetYaxis().SetTitle('#lambda_{#alpha=%s}^{ch}' % alpha) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) name = 'hResponse_ang_PtBinCH40-60_%sScaled' % label h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) + h.GetXaxis().SetTitle('#lambda_{#alpha=%s}^{parton}' % alpha) + h.GetYaxis().SetTitle('#lambda_{#alpha=%s}^{ch}' % alpha) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) name = 'hResponse_ang_PtBinCH60-80_%sScaled' % label h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) + h.GetXaxis().SetTitle('#lambda_{#alpha=%s}^{parton}' % alpha) + h.GetYaxis().SetTitle('#lambda_{#alpha=%s}^{ch}' % alpha) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -386,7 +386,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, 150, 0, 1.5, self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -395,7 +395,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -413,7 +413,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, 150, 0, 1.5, self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) 
h.GetXaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -422,7 +422,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -450,7 +450,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, 150, 0, 1.5, self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -458,7 +458,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, 150, 0, 1.5, self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -467,7 +467,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) h.Sumw2() 
setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -476,7 +476,7 @@ def initialize_hist(self): h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -492,8 +492,8 @@ def initialize_hist(self): 100, 0, 1.) h.GetXaxis().SetTitle('(#it{r} / #it{R})_{ch jet}') h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{ch jet}') % (str(beta), str(beta))) + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{ch jet}') % (str(alpha), str(alpha))) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -503,8 +503,8 @@ def initialize_hist(self): 100, 0, 1.) h.GetXaxis().SetTitle('(#it{r} / #it{R})_{ch jet}') h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{ch jet}') % (str(beta), str(beta))) + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{ch jet}') % (str(alpha), str(alpha))) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -514,8 +514,8 @@ def initialize_hist(self): 100, 0, 1.) h.GetXaxis().SetTitle('(#it{r} / #it{R})_{parton jet}') h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{parton jet}') % (str(beta), str(beta))) + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{parton jet}') % (str(alpha), str(alpha))) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -525,8 +525,8 @@ def initialize_hist(self): 100, 0, 1.) 
h.GetXaxis().SetTitle('(#it{r} / #it{R})_{parton jet}') h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{parton jet}') % (str(beta), str(beta))) + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{parton jet}') % (str(alpha), str(alpha))) h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -535,8 +535,8 @@ def initialize_hist(self): name = "hAngResidual_JetPt_%sScaled" % label h = ROOT.TH2F(name, name, 300, 0, 300, 200, -3., 1.) h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - h.GetYaxis().SetTitle('#frac{#lambda_{#beta}^{jet, parton}-#lambda_{#beta}' + \ - '^{ch jet}}{#lambda_{#beta}^{jet, parton}}') + h.GetYaxis().SetTitle('#frac{#lambda_{#alpha}^{jet, parton}-#lambda_{#alpha}' + \ + '^{ch jet}}{#lambda_{#alpha}^{jet, parton}}') h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -544,8 +544,8 @@ def initialize_hist(self): name = "hAngDiff_JetPt_%sScaled" % label h = ROOT.TH2F(name, name, 300, 0, 300, 200, -2., 2.) 
h.GetXaxis().SetTitle('#it{p}_{T}^{jet, ch}') - h.GetYaxis().SetTitle('#it{#lambda}_{#it{#beta}}^{jet, parton}-' + \ - '#it{#lambda}_{#it{#beta}}^{jet, ch}') + h.GetYaxis().SetTitle('#it{#lambda}_{#it{#alpha}}^{jet, parton}-' + \ + '#it{#lambda}_{#it{#alpha}}^{jet, ch}') h.Sumw2() setattr(self, name, h) getattr(self, hist_list_name).append(h) @@ -553,7 +553,7 @@ def initialize_hist(self): # Create THn of response dim = 4 title = ['p_{T}^{ch jet}', 'p_{T}^{parton jet}', - '#lambda_{#beta}^{ch}', '#lambda_{#beta}^{parton}'] + '#lambda_{#alpha}^{ch}', '#lambda_{#alpha}^{parton}'] nbins = [len(self.pt_bins)-1, len(self.pt_bins)-1, len(self.obs_bins)-1, len(self.obs_bins)-1] min_li = [self.pt_bins[0], self.pt_bins[0], @@ -596,7 +596,7 @@ def initialize_hist(self): # Another set of THn for full hadron folding title = ['p_{T}^{h jet}', 'p_{T}^{parton jet}', - '#lambda_{#beta}^{h}', '#lambda_{#beta}^{parton}'] + '#lambda_{#alpha}^{h}', '#lambda_{#alpha}^{parton}'] name = 'hResponse_JetPt_ang_h_%sScaled' % label h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) @@ -627,7 +627,7 @@ def initialize_hist(self): # Finally, a set of THn for folding H --> CH (with MPI on) title = ['p_{T}^{ch jet}', 'p_{T}^{h jet}', - '#lambda_{#beta}^{ch}', '#lambda_{#beta}^{h}'] + '#lambda_{#alpha}^{ch}', '#lambda_{#alpha}^{h}'] name = 'hResponse_JetPt_ang_Fnp_%sScaled' % label h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) @@ -957,14 +957,14 @@ def fill_matched_jet_tree(self, tw, jetR, iev, jp, jh, jchh): tw.fill_branch('p', jp) kappa = 1 - for beta in self.beta_list: - label = str(beta).replace('.', '') + for alpha in self.alpha_list: + label = str(alpha).replace('.', '') tw.fill_branch("l_ch_%s" % label, - fjext.lambda_beta_kappa(jchh, beta, kappa, jetR)) + fjext.lambda_beta_kappa(jchh, alpha, kappa, jetR)) tw.fill_branch("l_h_%s" % label, - fjext.lambda_beta_kappa(jh, beta, kappa, jetR)) + fjext.lambda_beta_kappa(jh, alpha, kappa, jetR)) 
tw.fill_branch("l_p_%s" % label, - fjext.lambda_beta_kappa(jp, beta, kappa, jetR)) + fjext.lambda_beta_kappa(jp, alpha, kappa, jetR)) # Save SoftDrop variables as well if desired if self.use_SD: @@ -980,11 +980,11 @@ def fill_matched_jet_tree(self, tw, jetR, iev, jp, jh, jchh): jet_sd_p = self.utils.groom(gshop_p, gs, jetR).pair() tw.fill_branch("l_ch_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jchh, jet_sd_chh, beta, kappa, jetR)) + jchh, jet_sd_chh, alpha, kappa, jetR)) tw.fill_branch("l_h_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jh, jet_sd_h, beta, kappa, jetR)) + jh, jet_sd_h, alpha, kappa, jetR)) tw.fill_branch("l_p_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jp, jet_sd_p, beta, kappa, jetR)) + jp, jet_sd_p, alpha, kappa, jetR)) #--------------------------------------------------------------- @@ -996,10 +996,10 @@ def fill_unmatched_jet_tree(self, tw, jetR, iev, jet): tw.fill_branch(self.level, jet) kappa = 1 - for beta in self.beta_list: - label = str(beta).replace('.', '') + for alpha in self.alpha_list: + label = str(alpha).replace('.', '') tw.fill_branch('l_%s_%s' % (self.level, label), - fjext.lambda_beta_kappa(jet, beta, kappa, jetR)) + fjext.lambda_beta_kappa(jet, alpha, kappa, jetR)) if self.use_SD: for i, gs in enumerate(self.grooming_settings): @@ -1010,7 +1010,7 @@ def fill_unmatched_jet_tree(self, tw, jetR, iev, jet): jet_sd = self.utils.groom(gshop, gs, jetR).pair() tw.fill_branch("l_ch_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jet, jet_sd, beta, kappa, jetR)) + jet, jet_sd, alpha, kappa, jetR)) #--------------------------------------------------------------- @@ -1018,12 +1018,12 @@ def fill_unmatched_jet_tree(self, tw, jetR, iev, jet): #--------------------------------------------------------------- def fill_MPI_histograms(self, jetR, jet): - for beta in self.beta_list: - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') + for alpha in self.alpha_list: + label = ("R%s_%s" % (str(jetR), str(alpha))).replace('.', 
'') h = getattr(self, 'hAng_JetPt_ch_MPIon_%sScaled' % label) kappa = 1 - h.Fill(jet.pt(), fjext.lambda_beta_kappa(jet, beta, kappa, jetR)) + h.Fill(jet.pt(), fjext.lambda_beta_kappa(jet, alpha, kappa, jetR)) if self.use_SD: for i, gs in enumerate(self.grooming_settings): @@ -1032,7 +1032,7 @@ def fill_MPI_histograms(self, jetR, jet): jet_sd = self.utils.groom(gshop, gs, jetR).pair() getattr(self, 'hAng_JetPt_ch_MPIon_%s_%sScaled' % (label, gl)).Fill( - jet.pt(), fjext.lambda_beta_kappa(jet, jet_sd, beta, kappa, jetR)) + jet.pt(), fjext.lambda_beta_kappa(jet, jet_sd, alpha, kappa, jetR)) #--------------------------------------------------------------- @@ -1069,22 +1069,22 @@ def fill_jet_histograms(self, jetR, jp, jh, jch): ''' # Fill angularity histograms and response matrices - for beta in self.beta_list: - self.fill_RMs(jetR, beta, jp, jh, jch) + for alpha in self.alpha_list: + self.fill_RMs(jetR, alpha, jp, jh, jch) #--------------------------------------------------------------- # Fill jet histograms #--------------------------------------------------------------- - def fill_RMs(self, jetR, beta, jp, jh, jch): + def fill_RMs(self, jetR, alpha, jp, jh, jch): # Calculate angularities kappa = 1 - lp = fjext.lambda_beta_kappa(jp, beta, kappa, jetR) - lh = fjext.lambda_beta_kappa(jh, beta, kappa, jetR) - lch = fjext.lambda_beta_kappa(jch, beta, kappa, jetR) + lp = fjext.lambda_beta_kappa(jp, alpha, kappa, jetR) + lh = fjext.lambda_beta_kappa(jh, alpha, kappa, jetR) + lch = fjext.lambda_beta_kappa(jch, alpha, kappa, jetR) - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') + label = ("R%s_%s" % (str(jetR), str(alpha))).replace('.', '') if self.level in [None, 'ch']: getattr(self, 'hAng_JetPt_ch_%sScaled' % label).Fill(jch.pt(), lch) @@ -1094,7 +1094,7 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): gshop = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) jch_sd = self.utils.groom(gshop, gs, jetR).pair() getattr(self, 
'hAng_JetPt_ch_%s_%sScaled' % (label, gl)).Fill( - jch.pt(), fjext.lambda_beta_kappa(jch, jch_sd, beta, kappa, jetR)) + jch.pt(), fjext.lambda_beta_kappa(jch, jch_sd, alpha, kappa, jetR)) if self.level in [None, 'h']: getattr(self, 'hAng_JetPt_h_%sScaled' % label).Fill(jh.pt(), lh) @@ -1104,7 +1104,7 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): gshop = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) jh_sd = self.utils.groom(gshop, gs, jetR).pair() getattr(self, 'hAng_JetPt_h_%s_%sScaled' % (label, gl)).Fill( - jh.pt(), fjext.lambda_beta_kappa(jh, jh_sd, beta, kappa, jetR)) + jh.pt(), fjext.lambda_beta_kappa(jh, jh_sd, alpha, kappa, jetR)) if self.level in [None, 'p']: getattr(self, 'hAng_JetPt_p_%sScaled' % label).Fill(jp.pt(), lp) @@ -1114,7 +1114,7 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): gshop = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) jp_sd = self.utils.groom(gshop, gs, jetR).pair() getattr(self, 'hAng_JetPt_p_%s_%sScaled' % (label, gl)).Fill( - jp.pt(), fjext.lambda_beta_kappa(jp, jp_sd, beta, kappa, jetR)) + jp.pt(), fjext.lambda_beta_kappa(jp, jp_sd, alpha, kappa, jetR)) if self.level == None: getattr(self, 'hResponse_ang_%sScaled' % label).Fill(lp, lch) @@ -1126,8 +1126,8 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): gshop_ch = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) jch_sd = self.utils.groom(gshop_ch, gs, jetR).pair() getattr(self, 'hResponse_ang_%s_%sScaled' % (label, gl)).Fill( - fjext.lambda_beta_kappa(jp, jp_sd, beta, kappa, jetR), \ - fjext.lambda_beta_kappa(jch, jch_sd, beta, kappa, jetR)) + fjext.lambda_beta_kappa(jp, jp_sd, alpha, kappa, jetR), \ + fjext.lambda_beta_kappa(jch, jch_sd, alpha, kappa, jetR)) ''' # Lambda at p-vs-ch-level for various bins in ch jet pT @@ -1146,7 +1146,7 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): getattr(self, 'hPhaseSpace_DeltaR_Pt_ch_%sScaled' % label).Fill( particle.pt(), deltaR / jetR) - lambda_i = lambda_beta_kappa_i(particle, jch, jetR, beta, 
1) + lambda_i = lambda_beta_kappa_i(particle, jch, jetR, alpha, 1) getattr(self, 'hPhaseSpace_ang_DeltaR_ch_%sScaled' % label).Fill(deltaR / jetR, lambda_i) getattr(self, 'hPhaseSpace_ang_Pt_ch_%sScaled' % label).Fill(particle.pt(), lambda_i) @@ -1168,7 +1168,7 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): getattr(self, 'hPhaseSpace_DeltaR_Pt_p_%sScaled' % label).Fill( particle.pt(), deltaR / jetR) - lambda_i = lambda_beta_kappa_i(particle, jp, jetR, beta, 1) + lambda_i = lambda_beta_kappa_i(particle, jp, jetR, alpha, 1) getattr(self, 'hPhaseSpace_ang_DeltaR_p_%sScaled' % label).Fill(deltaR / jetR, lambda_i) getattr(self, 'hPhaseSpace_ang_Pt_p_%sScaled' % label).Fill(particle.pt(), lambda_i) @@ -1217,13 +1217,13 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): # SoftDrop jet angularities gshop_ch = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) jet_sd_ch = self.utils.groom(gshop_ch, gs, jetR).pair() - lch_sd = fjext.lambda_beta_kappa(jch, jet_sd_ch, beta, kappa, jetR) + lch_sd = fjext.lambda_beta_kappa(jch, jet_sd_ch, alpha, kappa, jetR) gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) jet_sd_h = self.utils.groom(gshop_h, gs, jetR).pair() - lh_sd = fjext.lambda_beta_kappa(jh, jet_sd_h, beta, kappa, jetR) + lh_sd = fjext.lambda_beta_kappa(jh, jet_sd_h, alpha, kappa, jetR) gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) jet_sd_p = self.utils.groom(gshop_p, gs, jetR).pair() - lp_sd = fjext.lambda_beta_kappa(jp, jet_sd_p, beta, kappa, jetR) + lp_sd = fjext.lambda_beta_kappa(jp, jet_sd_p, alpha, kappa, jetR) x = ([jch.pt(), jp.pt(), lch_sd, lp_sd]) x_array = array('d', x) @@ -1239,14 +1239,14 @@ def fill_RMs(self, jetR, beta, jp, jh, jch): #--------------------------------------------------------------- def fill_jet_histograms_MPI(self, jetR, jp, jh, jch): - for beta in self.beta_list: + for alpha in self.alpha_list: # Calculate angularities kappa = 1 - lh = fjext.lambda_beta_kappa(jh, beta, kappa, jetR) - lch 
= fjext.lambda_beta_kappa(jch, beta, kappa, jetR) + lh = fjext.lambda_beta_kappa(jh, alpha, kappa, jetR) + lch = fjext.lambda_beta_kappa(jch, alpha, kappa, jetR) - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') + label = ("R%s_%s" % (str(jetR), str(alpha))).replace('.', '') # 4D response matrices for "forward folding" from h to ch level x = ([jch.pt(), jh.pt(), lch, lh]) @@ -1260,10 +1260,10 @@ def fill_jet_histograms_MPI(self, jetR, jp, jh, jch): # SoftDrop jet angularities gshop_ch = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) jet_sd_ch = self.utils.groom(gshop_ch, gs, jetR).pair() - lch_sd = fjext.lambda_beta_kappa(jch, jet_sd_ch, beta, kappa, jetR) + lch_sd = fjext.lambda_beta_kappa(jch, jet_sd_ch, alpha, kappa, jetR) gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) jet_sd_h = self.utils.groom(gshop_h, gs, jetR).pair() - lh_sd = fjext.lambda_beta_kappa(jh, jet_sd_h, beta, kappa, jetR) + lh_sd = fjext.lambda_beta_kappa(jh, jet_sd_h, alpha, kappa, jetR) x = ([jch.pt(), jh.pt(), lch_sd, lh_sd]) x_array = array('d', x) diff --git a/pyjetty/alice_analysis/process/user/ang/process_data_ang.py b/pyjetty/alice_analysis/process/user/ang/process_data_ang.py new file mode 100755 index 000000000..c32548153 --- /dev/null +++ b/pyjetty/alice_analysis/process/user/ang/process_data_ang.py @@ -0,0 +1,158 @@ +#!/usr/bin/env python3 + +""" + Analysis class to read a ROOT TTree of track information + and do jet-finding, and save basic histograms. 
+ + Author: Ezra Lesser (elesser@berkeley.edu) with much code borrowed + from original script by James Mulligan (james.mulligan@berkeley.edu) +""" + +from __future__ import print_function + +# General +import os +import argparse +import numpy as np +from array import array + +# Data analysis and plotting +import ROOT + +# Fastjet via python (from external library heppy) +import fjext + +# Base class +from pyjetty.alice_analysis.process.user.substructure import process_data_base + +################################################################ +class ProcessData_ang(process_data_base.ProcessDataBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): + + # Initialize base class + super(ProcessData_ang, self).__init__( + input_file, config_file, output_dir, debug_level, **kwargs) + + self.pt_bins = array('d', list(range(5, 305, 5))) + self.obs_bins_ang = np.concatenate((np.linspace(0, 0.009, 10), np.linspace(0.01, 0.1, 19), + np.linspace(0.11, 0.8, 70))) + self.obs_bins_mass = np.concatenate( + (np.linspace(0, 0.9, 10), np.linspace(1, 9.8, 45), np.linspace(10, 14.5, 10), + np.linspace(15, 19, 5), np.linspace(20, 60, 9))) + + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_user_output_objects(self): + + for jetR in self.jetR_list: + + for observable in self.observable_list: + # Should only be two: observable == "ang" or "mass" + if observable != "ang" and observable != "mass": + raise ValueError("Observable %s is not implemented in this script" % observable) + + obs_bins = getattr(self, "obs_bins_" + observable) + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = 
self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + if self.is_pp or self.include_no_subtraction: + name = ('h_%s_JetPt_R%s_%s' % (observable, jetR, obs_label)) if \ + len(obs_label) else ('h_%s_JetPt_R%s' % (observable, jetR)) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle(self.obs_names[observable]) + setattr(self, name, h) + + if not self.is_pp: + # Pb-Pb: have several R_max for contituent subtraction + max_distance = self.max_distance if isinstance(self.max_distance, list) \ + else self.max_distance[jetR] + for R_max in max_distance: + name = ('h_%s_JetPt_R%s_%s_Rmax%s' % ( + observable, jetR, obs_label, R_max)) if len(obs_label) else \ + ('h_%s_JetPt_R%s_Rmax%s' % (observable, jetR, R_max)) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle(self.obs_names[observable]) + setattr(self, name, h) + + #--------------------------------------------------------------- + # This function is called once for each jet subconfiguration + #--------------------------------------------------------------- + def fill_jet_histograms(self, observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_ungroomed, suffix): + + if observable == "ang": + # Calculate angularity + ang = fjext.lambda_beta_kappa(jet, jet_groomed_lund.pair(), obs_setting, 1, jetR) \ + if grooming_setting else fjext.lambda_beta_kappa(jet, obs_setting, 1, jetR) + + # Fill histograms + getattr(self, "h_ang_JetPt_R%s_%s%s" % (jetR, obs_label, suffix)).Fill( + jet_pt_ungroomed, ang) + + # Only do jet mass stuff once per set of angularity configs + elif observable == "mass": + + if grooming_setting: + name = 'h_mass_JetPt_R%s_%s%s' % (jetR, obs_label, suffix) + j_groomed = jet_groomed_lund.pair() + if 
not j_groomed.has_constituents(): + # Untagged jet -- record underflow value + getattr(self, name).Fill(jet_pt_ungroomed, -1) + else: + getattr(self, name).Fill(jet_pt_ungroomed, j_groomed.m()) + + else: + name = 'h_mass_JetPt_R%s%s' % (jetR, suffix) + getattr(self, name).Fill(jet_pt_ungroomed, jet.m()) + + +################################################################## +if __name__ == '__main__': + # Define arguments + parser = argparse.ArgumentParser(description='Process data') + parser.add_argument('-f', '--inputFile', action='store', + type=str, metavar='inputFile', + default='AnalysisResults.root', + help='Path of ROOT file containing TTrees') + parser.add_argument('-c', '--configFile', action='store', + type=str, metavar='configFile', + default='config/analysis_config.yaml', + help="Path of config file for analysis") + parser.add_argument('-o', '--outputDir', action='store', + type=str, metavar='outputDir', + default='./TestOutput', + help='Output directory for output to be written to') + + # Parse the arguments + args = parser.parse_args() + + print('Configuring...') + print('inputFile: \'{0}\''.format(args.inputFile)) + print('configFile: \'{0}\''.format(args.configFile)) + print('ouputDir: \'{0}\"'.format(args.outputDir)) + print('----------------------------------------------------------------') + + # If invalid inputFile is given, exit + if not os.path.exists(args.inputFile): + raise ValueError("File \"%s\" does not exist" % args.inputFile) + + # If invalid configFile is given, exit + if not os.path.exists(args.configFile): + raise ValueError("File \"%s\" does not exist" % args.configFile) + + analysis = ProcessData_ang( + input_file=args.inputFile, config_file=args.configFile, output_dir=args.outputDir) + analysis.process_data() diff --git a/pyjetty/alice_analysis/process/user/ang/process_jewel_truth_level_ang.py b/pyjetty/alice_analysis/process/user/ang/process_jewel_truth_level_ang.py new file mode 100755 index 000000000..7f64d7bb4 --- /dev/null 
+++ b/pyjetty/alice_analysis/process/user/ang/process_jewel_truth_level_ang.py @@ -0,0 +1,199 @@ +#!/usr/bin/env python3 + +""" + Analysis class to read a ROOT TTree of track information + and do jet-finding, and save basic histograms. + This specific code is to run over jewel generator data to produce histograms (at truth level) that will then be compared to + the data. That is, this base is to run over MC, but only at truth level, without response matrices. + + Author: Ezra Lesser (elesser@berkeley.edu) + Based on code by: James Mulligan (james.mulligan@berkeley.edu) +""" + +from __future__ import print_function + +# General +import os +import sys +import argparse + +# Data analysis and plotting +import ROOT +import yaml +import numpy as np +from array import array + +# Fastjet via python (from external library heppy) +import fjext + +# Load pyjetty ROOT utils +ROOT.gSystem.Load('libpyjetty_rutil') + +# Base class +from pyjetty.alice_analysis.process.user.substructure import process_jewel_generated_base + +################################################################ +class Process_CurvesFromJewelTracks_ang(process_jewel_generated_base.CurvesFromJewelTracks): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', **kwargs): + + # Initialize base class + super(Process_CurvesFromJewelTracks_ang, self).__init__(input_file, config_file, output_dir, **kwargs) + + self.pt_bins = array('d', list(range(5, 305, 5))) + self.obs_bins_ang = np.concatenate((np.linspace(0, 0.009, 10), np.linspace(0.01, 0.1, 19), + np.linspace(0.11, 0.8, 70))) + self.obs_bins_mass = np.concatenate( + (np.linspace(0, 0.9, 10), np.linspace(1, 9.8, 45), np.linspace(10, 14.5, 10), + np.linspace(15, 19, 5), np.linspace(20, 60, 9))) + + #--------------------------------------------------------------- + # Initialize histograms + 
#--------------------------------------------------------------- + def initialize_user_output_objects(self, label=''): + + for jetR in self.jetR_list: + for observable in self.observable_list: + + # Should only be two options: observable == "ang" or "mass" + if observable != "ang" and observable != "mass": + raise ValueError("Observable %s is not implemented in this script" % observable) + + obs_bins = getattr(self, "obs_bins_" + observable) + for i, obs_setting in enumerate(self.obs_settings[observable]): + + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + # Histogram name based on recoil subtraction method + names = [] + if not self.thermal_subtraction_method or \ + 'negative_recombiner' in self.thermal_subtraction_method: + names.append('h_%s_JetPt_R%s_%s%s' % (observable, jetR, obs_label, label) if \ + len(obs_label) else 'h_%s_JetPt_R%s%s' % (observable, jetR, label)) + elif 'gridsub' in self.thermal_subtraction_method: + for gridsize in self.gridsizes: + names.append('h_%s_JetPt_R%s_%s_gridsub_%s%s' % (observable, jetR, obs_label, gridsize, label) \ + if len(obs_label) else 'h_%s_JetPt_R%s_gridsub_%s%s' % (observable, jetR, gridsize, label)) + elif '4momsub' in self.thermal_subtraction_method: + names.append('h_%s_JetPt_R%s_%s_4momsub%s' % (self.observable, jetR, obs_label, label) \ + if len(obs_label) else 'h_%s_JetPt_R%s_4momsub%s' % (self.observable, jetR, label)) + else: + raise ValueError("Recoil subtraction method not recognized") + + for name in names: + h = ROOT.TH2F(name, name, len(self.pt_bins) - 1, self.pt_bins, len(obs_bins) - 1, obs_bins) + h.GetXaxis().SetTitle("#it{p}_{T}^{ch jet}") + h.GetYaxis().SetTitle(self.obs_names[observable]) + h.Sumw2() + setattr(self, name, h) + + + #--------------------------------------------------------------- + # This function is called once for each jet subconfiguration + 
#--------------------------------------------------------------- + def fill_jet_histograms( + self, observable, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, + obs_label, jet_pt_ungroomed, suffix=None, label=''): + + check_user_index = False + + if not self.thermal_subtraction_method: + name = 'h_%s_JetPt_R%s_%s%s' % (observable, jetR, obs_label, label) if \ + len(obs_label) else 'h_%s_JetPt_R%s%s' % (observable, jetR, label) + elif 'negative_recombiner' in self.thermal_subtraction_method: + name = 'h_%s_JetPt_R%s_%s%s' % (observable, jetR, obs_label, label) if \ + len(obs_label) else 'h_%s_JetPt_R%s%s' % (observable, jetR, label) + check_user_index = True + elif 'gridsub' in self.thermal_subtraction_method: + name = 'h_%s_JetPt_R%s_%s_gridsub_%s%s' % (observable, jetR, obs_label, suffix, label) \ + if len(obs_label) else 'h_%s_JetPt_R%s_gridsub_%s%s' % (observable, jetR, suffix, label) + elif '4momsub' in self.thermal_subtraction_method: + name = 'h_%s_JetPt_R%s_%s_4momsub%s' % (self.observable, jetR, obs_label, label) \ + if len(obs_label) else 'h_%s_JetPt_R%s_4momsub%s' % (self.observable, jetR, label) + else: + raise ValueError("Recoil subtraction method not recognized") + + obs = None + groomed_jet = None + + # Check to make sure that the jet is "real" with positive pT + if grooming_setting: + groomed_jet = jet_groomed_lund.pair() + if groomed_jet.user_index() < 0: + return + else: # no grooming + if jet.user_index() < 0: + return + + ####################################################################### + if observable == "ang": + kappa = 1 + + if grooming_setting: + groomed_jet = jet_groomed_lund.pair() + obs = fjext.lambda_beta_kappa(jet, groomed_jet, obs_setting, kappa, jetR, check_user_index) + + else: + obs = fjext.lambda_beta_kappa(jet, obs_setting, kappa, jetR, check_user_index) + + ####################################################################### + elif observable == "mass": + # m^2 = E^2 - p^2 + + if grooming_setting: + j_groomed 
= jet_groomed_lund.pair() + if not j_groomed.has_constituents(): + # Untagged jet -- record underflow value + obs = -1 + else: + obs = j_groomed.m() + + else: + obs = jet.m() + + + # Fill histograms + getattr(self, name).Fill(jet_pt_ungroomed, obs) + +################################################################## +if __name__ == '__main__': + # Define arguments + parser = argparse.ArgumentParser(description='Process Generator For Theory Comparison') + parser.add_argument('-f', '--inputFile', action='store', + type=str, metavar='inputFile', + default='AnalysisResults.root', + help='Path of ROOT file containing TTrees') + parser.add_argument('-c', '--configFile', action='store', + type=str, metavar='configFile', + default='config/analysis_config.yaml', + help="Path of config file for analysis") + parser.add_argument('-o', '--outputDir', action='store', + type=str, metavar='outputDir', + default='./TestOutput', + help='Output directory for output to be written to') + + # Parse the arguments + args = parser.parse_args() + + print('Configuring...') + print('inputFile: \'{0}\''.format(args.inputFile)) + print('configFile: \'{0}\''.format(args.configFile)) + print('ouputDir: \'{0}\"'.format(args.outputDir)) + print('----------------------------------------------------------------') + + # If invalid inputFile is given, exit + if not os.path.exists(args.inputFile): + print('File \"{0}\" does not exist! Exiting!'.format(args.inputFile)) + sys.exit(0) + + # If invalid configFile is given, exit + if not os.path.exists(args.configFile): + print('File \"{0}\" does not exist! 
Exiting!'.format(args.configFile)) + sys.exit(0) + + analysis = Process_CurvesFromJewelTracks_ang(input_file=args.inputFile, config_file=args.configFile, output_dir=args.outputDir) + analysis.process_gen() diff --git a/pyjetty/alice_analysis/process/user/ang/process_mc_ang.py b/pyjetty/alice_analysis/process/user/ang/process_mc_ang.py new file mode 100755 index 000000000..2bc8ff553 --- /dev/null +++ b/pyjetty/alice_analysis/process/user/ang/process_mc_ang.py @@ -0,0 +1,271 @@ +#!/usr/bin/env python3 + +""" + Analysis class to read a ROOT TTree of MC track information + and do jet-finding, and save response histograms. + + Author: James Mulligan (james.mulligan@berkeley.edu) +""" + +from __future__ import print_function + +# General +import os +import sys +import argparse + +# Data analysis and plotting +import numpy as np +import ROOT +import yaml +from array import * + +# Fastjet via python (from external library heppy) +import fjext + +# Analysis utilities +from pyjetty.alice_analysis.process.base import process_io +from pyjetty.alice_analysis.process.base import process_io_emb +from pyjetty.alice_analysis.process.base import jet_info +from pyjetty.alice_analysis.process.user.substructure import process_mc_base +from pyjetty.alice_analysis.process.base import thermal_generator +from pyjetty.mputils import CEventSubtractor + +################################################################ +class ProcessMC_ang(process_mc_base.ProcessMCBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): + + # Initialize base class + super(ProcessMC_ang, self).__init__(input_file, config_file, output_dir, debug_level, **kwargs) + + self.pt_bins = array('d', list(range(5, 100, 5)) + list(range(100, 210, 10))) + self.obs_bins_ang = np.concatenate((np.linspace(0, 0.009, 10), 
np.linspace(0.01, 0.1, 19), + np.linspace(0.11, 0.8, 70))) + self.obs_bins_mass = np.concatenate( + (np.linspace(0, 0.9, 10), np.linspace(1, 9.8, 45), np.linspace(10, 14.5, 10), + np.linspace(15, 19, 5), np.linspace(20, 60, 9))) + + # Override default behavior to create delta-observable histograms in Pb-Pb case + self.fill_delta_obs = True + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_user_output_objects_R(self, jetR): + + for observable in self.observable_list: + # Should only be two: observable == "ang" or "mass" + if observable != "ang" and observable != "mass": + raise ValueError("Observable %s is not implemented in this script" % observable) + + obs_name = self.obs_names[observable] + obs_bins = getattr(self, "obs_bins_" + observable) + # Use more finely binned pT bins for TH2s than for the RMs + pt_bins = array('d', list(range(0, 201, 1))) + + # Loop over subobservable (alpha value) + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + # Create RM histograms + if self.is_pp: + self.create_histograms(observable, jetR, obs_label) + + else: + + max_distance = self.max_distance if isinstance(self.max_distance, list) else \ + self.max_distance[jetR] + for R_max in max_distance: + self.create_histograms(observable, jetR, obs_label, R_max) + + if self.thermal_model: + max_distance = self.max_distance if isinstance(self.max_distance, list) else \ + self.max_distance[jetR] + for R_max in max_distance: + name = 'h_%s_JetPt_R%s_%s_Rmax%s' % (observable, jetR, obs_label, R_max) if \ + len(obs_label) else ('h_%s_JetPt_R%s_Rmax%s' % (observable, jetR, R_max)) + h = ROOT.TH2F(name, name, len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + 
h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle(obs_name) + h.Sumw2() + setattr(self, name, h) + + name = ('h_%s_JetPt_Truth_R%s_%s' % (observable, jetR, obs_label)) if \ + len(obs_label) else ('h_%s_JetPt_Truth_R%s' % (observable, jetR)) + h = ROOT.TH2F(name, name, len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('#it{p}_{T,truth}^{ch jet}') + h.GetYaxis().SetTitle(obs_name + '^{truth}') + h.Sumw2() + setattr(self, name, h) + + #--------------------------------------------------------------- + # Create angularity response histograms + #--------------------------------------------------------------- + def create_histograms(self, observable, jetR, obs_label, R_max = None): + + if R_max: + suffix = '_Rmax' + str(R_max) + else: + suffix = '' + + # LaTeX formatted observable name + obs_name = self.obs_names[observable] + + # Retrieve binnings from memory + pt_bins = self.pt_bins + obs_bins = getattr(self, "obs_bins_" + observable) + + # Create THn of response for ang + if self.fill_RM_histograms: + dim = 4; + title = ['#it{p}_{T,det}^{ch jet}', '#it{p}_{T,truth}^{ch jet}', + obs_name + '^{det}', obs_name + '^{truth}'] + nbins = [len(pt_bins)-1, len(pt_bins)-1, len(obs_bins)-1, len(obs_bins)-1] + min_li = [pt_bins[0], pt_bins[0], obs_bins[0], obs_bins[0] ] + max_li = [pt_bins[-1], pt_bins[-1], obs_bins[-1], obs_bins[-1] ] + + name = ('hResponse_JetPt_%s_R%s_%s%s' % (observable, jetR, obs_label, suffix)) if \ + len(obs_label) else ('hResponse_JetPt_%s_R%s%s' % (observable, jetR, suffix)) + nbins = (nbins) + xmin = (min_li) + xmax = (max_li) + nbins_array = array('i', nbins) + xmin_array = array('d', xmin) + xmax_array = array('d', xmax) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() # enables calculation of errors + 
setattr(self, name, h) + + name = 'hResidual_JetPt_%s_R%s_%s%s' % (observable, jetR, obs_label, suffix) if \ + len(obs_label) else ('hResidual_JetPt_%s_R%s%s' % (observable, jetR, suffix)) + h = ROOT.TH3F(name, name, 20, 0, 200, 50, 0., 0.5, 200, -2., 2.) + h.GetXaxis().SetTitle('#it{p}_{T,truth}^{ch jet}') + h.GetYaxis().SetTitle(obs_name + '^{truth}') + h.GetZaxis().SetTitle('#frac{%s^{det}-%s^{truth}}{%s^{truth}}' % \ + (obs_name, obs_name, obs_name)) + h.Sumw2() + setattr(self, name, h) + + if not self.is_pp and self.fill_delta_obs: + # Delta-observable histograms for studying background subtraction effects + name = 'hDeltaObs_%s_emb_R%s_%s%s' % (observable, jetR, obs_label, suffix) + h = None + if observable == "ang": + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1, 1) + elif observable == "mass": + h = ROOT.TH2F(name, name, 300, 0, 300, 400, -100, 100) + h.Sumw2() + setattr(self, name, h) + + #--------------------------------------------------------------- + # Calculate the observable given a jet + #--------------------------------------------------------------- + def calculate_observable(self, observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + if observable == "ang": + + return fjext.lambda_beta_kappa(jet, jet_groomed_lund.pair(), obs_setting, 1, jetR) \ + if grooming_setting else fjext.lambda_beta_kappa(jet, obs_setting, 1, jetR) + + elif observable == "mass": + + if grooming_setting: + j_groomed = jet_groomed_lund.pair() + if not j_groomed.has_constituents(): + # Untagged jet -- record underflow value + return -1 + else: + return j_groomed.m() + + return jet.m() + + # Should not be any other observable + raise ValueError("Observable %s not implemented" % observable) + + + #--------------------------------------------------------------- + # This function is called once for each jet subconfiguration + # Fill 2D histogram of (pt, obs) + #--------------------------------------------------------------- + 
def fill_observable_histograms(self, observable, hname, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + obs = self.calculate_observable(observable, jet, jet_groomed_lund, jetR, + obs_setting, grooming_setting, obs_label, jet_pt_ungroomed) + + # Fill histograms + name = hname.format(observable, obs_label).replace("__", "_") + name = name[:-1] if name[-1] == "_" else name + getattr(self, name).Fill(jet_pt_ungroomed, obs) + + + #--------------------------------------------------------------- + # Fill matched jet histograms + #--------------------------------------------------------------- + def fill_matched_jet_histograms(self, observable, jet_det, jet_det_groomed_lund, + jet_truth, jet_truth_groomed_lund, jet_pp_det, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + R_max, suffix, **kwargs): + + obs_det = self.calculate_observable(observable, jet_det, jet_det_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_det_ungroomed) + + obs_tru = self.calculate_observable(observable, jet_truth, jet_truth_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_truth_ungroomed) + + # Fill histograms + self.fill_response(observable, jetR, jet_det.pt(), jet_truth.pt(), + obs_det, obs_tru, obs_label, R_max) + + +################################################################## +if __name__ == '__main__': + # Define arguments + parser = argparse.ArgumentParser(description='Process MC') + parser.add_argument('-f', '--inputFile', action='store', + type=str, metavar='inputFile', + default='AnalysisResults.root', + help='Path of ROOT file containing TTrees') + parser.add_argument('-c', '--configFile', action='store', + type=str, metavar='configFile', + default='config/analysis_config.yaml', + help="Path of config file for analysis") + parser.add_argument('-o', '--outputDir', action='store', + type=str, metavar='outputDir', + default='./TestOutput', + 
help='Output directory for output to be written to') + + # Parse the arguments + args = parser.parse_args() + + print('Configuring...') + print('inputFile: \'{0}\''.format(args.inputFile)) + print('configFile: \'{0}\''.format(args.configFile)) + print('outputDir: \'{0}\''.format(args.outputDir)) + + # If invalid inputFile is given, exit + if not os.path.exists(args.inputFile): + print('File \"{0}\" does not exist! Exiting!'.format(args.inputFile)) + sys.exit(0) + + # If invalid configFile is given, exit + if not os.path.exists(args.configFile): + print('File \"{0}\" does not exist! Exiting!'.format(args.configFile)) + sys.exit(0) + + analysis = ProcessMC_ang(input_file=args.inputFile, config_file=args.configFile, output_dir=args.outputDir) + analysis.process_mc() diff --git a/pyjetty/alice_analysis/process/user/ang_pp/process_parton_hadron_ang.py b/pyjetty/alice_analysis/process/user/ang/process_parton_hadron_ang.py similarity index 100% rename from pyjetty/alice_analysis/process/user/ang_pp/process_parton_hadron_ang.py rename to pyjetty/alice_analysis/process/user/ang/process_parton_hadron_ang.py diff --git a/pyjetty/alice_analysis/process/user/ang/pythia_parton_hadron.py b/pyjetty/alice_analysis/process/user/ang/pythia_parton_hadron.py new file mode 100755 index 000000000..c4b32c96c --- /dev/null +++ b/pyjetty/alice_analysis/process/user/ang/pythia_parton_hadron.py @@ -0,0 +1,1227 @@ +#!/usr/bin/env python + +from __future__ import print_function + +import fastjet as fj +import fjcontrib +import fjext + +import ROOT + +import tqdm +import yaml +import copy +import argparse +import os + +from pyjetty.mputils import * + +from heppy.pythiautils import configuration as pyconf +import pythia8 +import pythiafjext +import pythiaext + +from pyjetty.alice_analysis.process.base import process_base +from pyjetty.alice_analysis.process.user.ang.helpers import lambda_alpha_kappa_i + +from array import array +import numpy as np + +# Prevent ROOT from stealing focus when 
plotting +ROOT.gROOT.SetBatch(True) +# Automatically set Sumw2 when creating new histograms +ROOT.TH1.SetDefaultSumw2() + +################################################################ +class pythia_parton_hadron(process_base.ProcessBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', + debug_level=0, args=None, **kwargs): + + super(pythia_parton_hadron, self).__init__( + input_file, config_file, output_dir, debug_level, **kwargs) + + self.initialize_config(args) + + #--------------------------------------------------------------- + # Main processing function + #--------------------------------------------------------------- + def pythia_parton_hadron(self, args): + + # Create ROOT TTree file for storing raw PYTHIA particle information + outf_path = os.path.join(self.output_dir, args.tree_output_fname) + outf = ROOT.TFile(outf_path, 'recreate') + outf.cd() + + # Initialize response histograms + self.initialize_hist() + + pinfo('user seed for pythia', self.user_seed) + # mycfg = ['PhaseSpace:pThatMin = 100'] + mycfg = ['Random:setSeed=on', 'Random:seed={}'.format(self.user_seed)] + mycfg.append('HadronLevel:all=off') + + # PYTHIA instance with MPI off + setattr(args, "py_noMPI", True) + pythia = pyconf.create_and_init_pythia_from_args(args, mycfg) + + # print the banner first + fj.ClusterSequence.print_banner() + print() + + self.init_jet_tools() + self.calculate_events(pythia) + pythia.stat() + print() + + # PYTHIA instance with MPI on + setattr(args, "py_noMPI", False) + pythia_MPI = pyconf.create_and_init_pythia_from_args(args, mycfg) + self.calculate_events(pythia_MPI, MPIon=True) + print() + + if not self.no_tree: + for jetR in self.jetR_list: + getattr(self, "tw_R%s" % str(jetR).replace('.', '')).fill_tree() + + self.scale_print_final_info(pythia, pythia_MPI) + + outf.Write() + 
outf.Close() + + self.save_output_objects() + + #--------------------------------------------------------------- + # Initialize config file into class members + #--------------------------------------------------------------- + def initialize_config(self, args): + + # Call base class initialization + process_base.ProcessBase.initialize_config(self) + + # Read config file + with open(self.config_file, 'r') as stream: + config = yaml.safe_load(stream) + + if not os.path.exists(self.output_dir): + os.makedirs(self.output_dir) + + # Defaults to None if not in use + self.level = args.no_match_level + + self.jetR_list = config["jetR"] + + self.user_seed = args.user_seed + self.nev = args.nev + + self.observable_list = config["process_observables"] + self.obs_settings = {} + self.obs_grooming_settings = {} + self.obs_names = {} + for obs in self.observable_list: + + obs_config_dict = config[obs] + obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + self.obs_settings[obs] = self.utils.obs_settings( + obs, obs_config_dict, obs_subconfig_list) + self.obs_grooming_settings[obs] = self.utils.grooming_settings(obs_config_dict) + + self.obs_names[obs] = obs_config_dict["common_settings"]["xtitle"] + + # Construct set of unique grooming settings + self.grooming_settings = [] + lists_grooming = [self.obs_grooming_settings[obs] for obs in self.observable_list] + for observable in lists_grooming: + for setting in observable: + if setting not in self.grooming_settings and setting != None: + self.grooming_settings.append(setting) + + # Manually added binnings for RM and scaling histograms + self.pt_bins = array('d', list(range(5, 210, 5))) + self.obs_bins_ang = np.concatenate((np.linspace(0, 0.0009, 10), np.linspace(0.001, 0.009, 9), + np.linspace(0.01, 0.1, 19), np.linspace(0.11, 1., 90))) + self.obs_bins_mass = np.concatenate( + (np.linspace(0, 0.9, 10), 
np.linspace(1, 9.8, 45), np.linspace(10, 14.5, 10), + np.linspace(15, 19, 5), np.linspace(20, 60, 9))) + + # hadron level - ALICE tracking restriction + self.max_eta_hadron = 0.9 + + # Whether or not to rescale final jet histograms based on sigma/N + self.no_scale = args.no_scale + + # Whether or not to save particle info in raw tree structure + self.no_tree = args.no_tree + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_hist(self): + + self.hNevents = ROOT.TH1I("hNevents", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + self.hNeventsMPI = ROOT.TH1I("hNeventsMPI", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + + for jetR in self.jetR_list: + + # Store a list of all the histograms just so that we can rescale them later + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + setattr(self, hist_list_name, []) + hist_list_name_MPIon = "hist_list_MPIon_R%s" % str(jetR).replace('.', '') + setattr(self, hist_list_name_MPIon, []) + + R_label = "R" + str(jetR).replace('.', '') + 'Scaled' + + for MPI in ["", "MPIon_"]: + R_label = MPI + R_label + list_name = hist_list_name_MPIon if MPI else hist_list_name + if self.level in [None, 'ch']: + name = 'hJetPt_ch_%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{ch jet};#frac{dN}{dp_{T}^{ch jet}};', 300, 0, 300) + h.Sumw2() # enables calculation of errors + setattr(self, name, h) + getattr(self, list_name).append(h) + + name = 'hNconstit_Pt_ch_%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{ch jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + if self.level in [None, 'h']: + name = 'hJetPt_h_%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{jet, h};#frac{dN}{dp_{T}^{jet, h}};', 300, 0, 300) + h.Sumw2() + setattr(self, name, h) + 
getattr(self, list_name).append(h) + + name = 'hNconstit_Pt_h_%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{h jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{h jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + if self.level in [None, 'p']: + name = 'hJetPt_p_%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{jet, parton};#frac{dN}{dp_{T}^{jet, parton}};', + 300, 0, 300) + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + name = 'hNconstit_Pt_p_%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{p jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{p jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + if self.level == None: + name = 'hJetPtRes_%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 1.) + h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') + h.GetYaxis().SetTitle( + '#frac{#it{p}_{T}^{parton jet}-#it{p}_{T}^{ch jet}}{#it{p}_{T}^{parton jet}}') + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + name = 'hResponse_JetPt_%s' % R_label + h = ROOT.TH2F(name, name, 200, 0, 200, 200, 0, 200) + h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') + h.GetYaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + ''' + # Jet multiplicity for matched jets with a cut at ch-jet level + name = 'hNconstit_Pt_ch_PtBinCH60-80_%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{ch jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + name = 'hNconstit_Pt_h_PtBinCH60-80_%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{h jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{h jet}') + 
h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + + name = 'hNconstit_Pt_p_PtBinCH60-80_%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{parton jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, list_name).append(h) + ''' + + for obs in self.observable_list: + if obs != "ang" and obs != "mass": + raise ValueError("Observable %s not yet implemented in this script!" % obs) + + for i in range(len(self.obs_settings[obs])): + + obs_setting = self.obs_settings[obs][i] + grooming_setting = self.obs_grooming_settings[obs][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + label = ("R%s_%s" % (jetR, obs_label)).replace('.', '') + obs_bins = getattr(self, "obs_bins_"+obs) + + if self.level in [None, 'ch']: + name = 'h_%s_JetPt_ch_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{ch jet}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{ch}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'h_%s_JetPt_ch_MPIon_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{ch jet}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{ch}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name_MPIon).append(h) + + + if self.level in [None, 'h']: + name = 'h_%s_JetPt_h_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{jet, h}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{h}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level in [None, 'p']: + name = 'h_%s_JetPt_p_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, 
self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{jet, parton}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{parton}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level == None: + name = 'hResponse_%s_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(obs_bins)-1, obs_bins, len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle(self.obs_names[obs]+'^{parton}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{ch}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + ''' + name = 'hResponse_%s_PtBinCH20-40_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) + h.GetXaxis().SetTitle(self.obs_names[obs]+'^{parton}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{ch}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hResponse_%s_PtBinCH40-60_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) + h.GetXaxis().SetTitle(self.obs_names[obs]+'^{parton}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{ch}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hResponse_%s_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) + h.GetXaxis().SetTitle(self.obs_names[obs]+'^{parton}') + h.GetYaxis().SetTitle(self.obs_names[obs]+'^{ch}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Phase space plots integrated over all pT bins + name = 'hPhaseSpace_DeltaR_Pt_ch_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], + 150, 0, 1.5) + h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') + h.GetYaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_DeltaR_ch_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 150, 0, 1.5, + self.n_lambda_bins, 
self.lambda_limits[0], self.lambda_limits[1]) + h.GetXaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_Pt_ch_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], + self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) + h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_DeltaR_Pt_p_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], + 150, 0, 1.5) + h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') + h.GetYaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_DeltaR_p_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 150, 0, 1.5, + self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) + h.GetXaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_Pt_p_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], + self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) + h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Phase space plots binned in ch jet pT + name = 'hPhaseSpace_DeltaR_Pt_ch_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], 
self.pt_limits[1], + 150, 0, 1.5) + h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') + h.GetYaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_DeltaR_Pt_p_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], + 150, 0, 1.5) + h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') + h.GetYaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_DeltaR_ch_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 150, 0, 1.5, + self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) + h.GetXaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_DeltaR_p_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 150, 0, 1.5, + self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) + h.GetXaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_Pt_ch_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], + self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) + h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{ch jet}' % str(alpha)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hPhaseSpace_%s_Pt_p_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], + self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) + 
h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') + h.GetYaxis().SetTitle('(#lambda_{#alpha=%s, i})_{parton jet}' % str(alpha)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Annulus plots for amount of lambda contained within some r < R + self.annulus_plots_num_r = 150 + self.annulus_plots_max_x = 1.5 + low_bound = self.annulus_plots_max_x / self.annulus_plots_num_r / 2. + up_bound = self.annulus_plots_max_x + low_bound + + name = 'hAnnulus_%s_ch_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, + 100, 0, 1.) + h.GetXaxis().SetTitle('(#it{r} / #it{R})_{ch jet}') + h.GetYaxis().SetTitle( + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{ch jet}') % (str(alpha), str(alpha))) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hAnnulus_%s_ch_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, + 100, 0, 1.) + h.GetXaxis().SetTitle('(#it{r} / #it{R})_{ch jet}') + h.GetYaxis().SetTitle( + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{ch jet}') % (str(alpha), str(alpha))) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hAnnulus_%s_p_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, + 100, 0, 1.) + h.GetXaxis().SetTitle('(#it{r} / #it{R})_{parton jet}') + h.GetYaxis().SetTitle( + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{parton jet}') % (str(alpha), str(alpha))) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hAnnulus_%s_p_PtBinCH60-80_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, + 100, 0, 1.) 
+ h.GetXaxis().SetTitle('(#it{r} / #it{R})_{parton jet}') + h.GetYaxis().SetTitle( + ('(#frac{#lambda_{#alpha=%s}(#it{r})}' + \ + '{#lambda_{#alpha=%s}(#it{R})})_{parton jet}') % (str(alpha), str(alpha))) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + ''' + + name = "h_%sResidual_JetPt_%sScaled" % (obs, label) + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -3., 1.) + h.GetXaxis().SetTitle('p_{T}^{jet, parton}') + h.GetYaxis().SetTitle( + '#frac{' + self.obs_names[obs] + '^{parton}-' + \ + self.obs_names[obs] + '^{ch}}{' + self.obs_names[obs] + '^{parton}}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = "h_%sDiff_JetPt_%sScaled" % (obs, label) + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -2., 2.) + h.GetXaxis().SetTitle('#it{p}_{T}^{jet, ch}') + h.GetYaxis().SetTitle(self.obs_names[obs] + '^{parton}-' + \ + self.obs_names[obs] + '^{ch}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Create THn of response + dim = 4 + title = ['p_{T}^{ch jet}', 'p_{T}^{parton jet}', + self.obs_names[obs]+'^{ch}', self.obs_names[obs]+'^{parton}'] + nbins = [len(self.pt_bins)-1, len(self.pt_bins)-1, + len(obs_bins)-1, len(obs_bins)-1] + min_li = [self.pt_bins[0], self.pt_bins[0], + obs_bins[0], obs_bins[0] ] + max_li = [self.pt_bins[-1], self.pt_bins[-1], + obs_bins[-1], obs_bins[-1] ] + + name = 'hResponse_JetPt_%s_ch_%sScaled' % (obs, label) + nbins = (nbins) + xmin = (min_li) + xmax = (max_li) + nbins_array = array('i', nbins) + xmin_array = array('d', xmin) + xmax_array = array('d', xmax) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Another set of THn for full hadron folding + title = 
['p_{T}^{h jet}', 'p_{T}^{parton jet}', + self.obs_names[obs] + '^{h}', self.obs_names[obs] + '^{parton}'] + + name = 'hResponse_JetPt_%s_h_%sScaled' % (obs, label) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Finally, a set of THn for folding H --> CH (with MPI on) + title = ['p_{T}^{ch jet}', 'p_{T}^{h jet}', + self.obs_names[obs] + '^{ch}', self.obs_names[obs] + '^{h}'] + + name = 'hResponse_JetPt_%s_Fnp_%sScaled' % (obs, label) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name_MPIon).append(h) + + #--------------------------------------------------------------- + # Initiate jet defs, selectors, and sd (if required) + #--------------------------------------------------------------- + def init_jet_tools(self): + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + if not self.no_tree: + # Initialize tree writer + name = 'particle_unscaled_R%s' % jetR_str + t = ROOT.TTree(name, name) + setattr(self, "t_R%s" % jetR_str, t) + tw = RTreeWriter(tree=t) + setattr(self, "tw_R%s" % jetR_str, tw) + + # set up our jet definition and a jet selector + jet_def = fj.JetDefinition(fj.antikt_algorithm, jetR) + setattr(self, "jet_def_R%s" % jetR_str, jet_def) + print(jet_def) + + pwarning('max eta for particles after hadronization set to', self.max_eta_hadron) + parts_selector_h = fj.SelectorAbsEtaMax(self.max_eta_hadron) + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + jet_selector = 
fj.SelectorPtMin(5.0) & \ + fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) + setattr(self, "jet_selector_R%s" % jetR_str, jet_selector) + + #max_eta_parton = self.max_eta_hadron + 2. * jetR + #setattr(self, "max_eta_parton_R%s" % jetR_str, max_eta_parton) + #pwarning("Max eta for partons with jet R =", jetR, "set to", max_eta_parton) + #parts_selector_p = fj.SelectorAbsEtaMax(max_eta_parton) + #setattr(self, "parts_selector_p_R%s" % jetR_str, parts_selector_p) + + count1 = 0 # Number of jets rejected from ch-h matching + setattr(self, "count1_R%s" % jetR_str, count1) + count2 = 0 # Number of jets rejected from h-p matching + setattr(self, "count2_R%s" % jetR_str, count2) + + + #--------------------------------------------------------------- + # Calculate events and pass information on to jet finding + #--------------------------------------------------------------- + def calculate_events(self, pythia, MPIon=False): + + iev = 0 # Event loop count + + if MPIon: + hNevents = self.hNeventsMPI + else: + hNevents = self.hNevents + + while hNevents.GetBinContent(1) < self.nev: + if not pythia.next(): + continue + + parts_pythia_p = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + + hstatus = pythia.forceHadronLevel() + if not hstatus: + #pwarning('forceHadronLevel false event', iev) + continue + #parts_pythia_h = pythiafjext.vectorize_select( + # pythia, [pythiafjext.kHadron, pythiafjext.kCharged]) + parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + + parts_pythia_hch = pythiafjext.vectorize_select( + pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) + + """ TODO: fix for multiple jet R + parts_pythia_p_selected = parts_selector_p(parts_pythia_p) + parts_pythia_h_selected = parts_selector_h(parts_pythia_h) + parts_pythia_hch_selected = parts_selector_h(parts_pythia_hch) + + if self.debug_level > 1: + pinfo('debug partons...') + for p in parts_pythia_p_selected: + pyp = pythiafjext.getPythia8Particle(p) 
+ print(pyp.name()) + pinfo('debug hadrons...') + for p in parts_pythia_h_selected: + pyp = pythiafjext.getPythia8Particle(p) + print(pyp.name()) + pinfo('debug ch. hadrons...') + for p in parts_pythia_hch_selected: + pyp = pythiafjext.getPythia8Particle(p) + print(pyp.name()) + """ + + # Some "accepted" events don't survive hadronization step -- keep track here + hNevents.Fill(0) + self.find_jets_fill_trees(parts_pythia_p, parts_pythia_h, parts_pythia_hch, iev, MPIon) + + iev += 1 + + + #--------------------------------------------------------------- + # Find jets, do matching between levels, and fill histograms & trees + #--------------------------------------------------------------- + def find_jets_fill_trees(self, parts_pythia_p, parts_pythia_h, parts_pythia_hch, + iev, MPIon=False): + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + jet_selector = getattr(self, "jet_selector_R%s" % jetR_str) + jet_def = getattr(self, "jet_def_R%s" % jetR_str) + t = None; tw = None; + if not self.no_tree: + t = getattr(self, "t_R%s" % jetR_str) + tw = getattr(self, "tw_R%s" % jetR_str) + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + + # parts = pythiafjext.vectorize(pythia, True, -1, 1, False) + jets_p = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_p))) + jets_h = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_h))) + jets_ch = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_hch))) + + if MPIon: + if self.level: + for jet in locals()["jets_" + self.level]: + self.fill_unmatched_histograms(jetR, jet, self.level, MPI=True) + continue # Don't need to do matching + else: + for jet in jets_p: + self.fill_unmatched_histograms(jetR, jet, 'p', MPI=True) + for jet in jets_h: + self.fill_unmatched_histograms(jetR, jet, 'h', MPI=True) + for jet in jets_ch: + self.fill_unmatched_histograms(jetR, jet, 'ch', MPI=True) + + else: # MPI off + if self.level: + for jet in locals()["jets_" + self.level]: + 
self.fill_unmatched_histograms(jetR, jet, self.level) + if not self.no_tree: + self.fill_unmatched_jet_tree(tw, jetR, iev, jet) + continue # Don't need to do matching + else: + for jet in jets_p: + self.fill_unmatched_histograms(jetR, jet, 'p') + for jet in jets_h: + self.fill_unmatched_histograms(jetR, jet, 'h') + for jet in jets_ch: + self.fill_unmatched_histograms(jetR, jet, 'ch') + + # Start the matching procedure + for i,jchh in enumerate(jets_ch): + + # match hadron (full) jet + drhh_list = [] + for j, jh in enumerate(jets_h): + drhh = jchh.delta_R(jh) + if drhh < jetR / 2.: + drhh_list.append((j,jh)) + if len(drhh_list) != 1: + count1 += 1 + else: # Require unique match + j, jh = drhh_list[0] + + # match parton level jet + dr_list = [] + for k, jp in enumerate(jets_p): + dr = jh.delta_R(jp) + if dr < jetR / 2.: + dr_list.append((k, jp)) + if len(dr_list) != 1: + count2 += 1 + else: + k, jp = dr_list[0] + + if self.debug_level > 0: + pwarning('event', iev) + pinfo('matched jets: ch.h:', jchh.pt(), 'h:', jh.pt(), + 'p:', jp.pt(), 'dr:', dr) + + if not MPIon: + self.fill_jet_histograms(jetR, jp, jh, jchh) + if not self.no_tree: + self.fill_matched_jet_tree(tw, jetR, iev, jp, jh, jchh) + else: + self.fill_jet_histograms_MPI(jetR, jp, jh, jchh) + + #print(" |-> SD jet params z={0:10.3f} dR={1:10.3f} mu={2:10.3f}".format( + # sd_info.z, sd_info.dR, sd_info.mu)) + + if MPIon: + setattr(self, "count1_R%s_MPIon" % jetR_str, count1) + setattr(self, "count2_R%s_MPIon" % jetR_str, count2) + else: + setattr(self, "count1_R%s" % jetR_str, count1) + setattr(self, "count2_R%s" % jetR_str, count2) + + + #--------------------------------------------------------------- + # Fill jet tree with (unscaled/raw) matched parton/hadron tracks + #--------------------------------------------------------------- + def fill_matched_jet_tree(self, tw, jetR, iev, jp, jh, jch): + + tw.fill_branch('iev', iev) + tw.fill_branch('ch', jch) + tw.fill_branch('h', jh) + tw.fill_branch('p', jp) + + 
self.fill_unmatched_jet_tree(tw, jetR, iev, jp, level='p', save_iev=False) + self.fill_unmatched_jet_tree(tw, jetR, iev, jh, level='h', save_iev=False) + self.fill_unmatched_jet_tree(tw, jetR, iev, jch, level='ch', save_iev=False) + + #--------------------------------------------------------------- + # Fill jet tree with (unscaled/raw) unmatched parton/hadron tracks + #--------------------------------------------------------------- + def fill_unmatched_jet_tree(self, tw, jetR, iev, jet, level='ch', save_iev=True): + + if save_iev: + tw.fill_branch('iev', iev) + tw.fill_branch(level, jet) + + for obs in self.observable_list: + for i in range(len(self.obs_settings[obs])): + + obs_setting = self.obs_settings[obs][i] + grooming_setting = self.obs_grooming_settings[obs][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + label = ("R%s_%s" % (jetR, obs_label)).replace('.', '') + + jet_sd = None + if grooming_setting: + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_sd = self.utils.groom(gshop, grooming_setting, jetR).pair() + + obs_val = None + + # Calculate angularities + if obs == "ang": + alpha = obs_setting + kappa = 1 + if grooming_setting: + obs_val = fjext.lambda_beta_kappa(jet, jet_sd, alpha, kappa, jetR) + else: + obs_val = fjext.lambda_beta_kappa(jet, alpha, kappa, jetR) + + # Jet mass histograms + elif obs == "mass": + if grooming_setting: + # Untagged jets -- record underflow value + obs_val = jet_sd.m() if jet_sd.has_constituents() else -1 + + else: + obs_val = jet.m() + + else: + raise ValueError("Observable not implemented in fill_unmatched_jet_tree") + + tw.fill_branch("%s_%s_%s" % (obs, level, label), obs_val) + + #--------------------------------------------------------------- + # Fill jet histograms for PYTHIA run-through before matching + #--------------------------------------------------------------- + def fill_unmatched_histograms(self, jetR, jet, level, MPI=False): + + # Observable-independent 
histograms + R_label = str(jetR).replace('.', '') + 'Scaled' + if MPI: + getattr(self, 'hJetPt_%s_MPIon_R%s' % (level, R_label)).Fill(jet.pt()) + getattr(self, 'hNconstit_Pt_%s_MPIon_R%s' % (level, R_label)).Fill(jet.pt(), len(jet.constituents())) + else: + getattr(self, 'hJetPt_%s_R%s' % (level, R_label)).Fill(jet.pt()) + getattr(self, 'hNconstit_Pt_%s_R%s' % (level, R_label)).Fill(jet.pt(), len(jet.constituents())) + + for obs in self.observable_list: + for i in range(len(self.obs_settings[obs])): + + obs_setting = self.obs_settings[obs][i] + grooming_setting = self.obs_grooming_settings[obs][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + label = ("R%s_%s" % (jetR, obs_label)).replace('.', '') + + if MPI: + h = getattr(self, 'h_%s_JetPt_%s_MPIon_%sScaled' % (obs, level, label)) + else: + h = getattr(self, 'h_%s_JetPt_%s_%sScaled' % (obs, level, label)) + + # Jet angularity histograms + if obs == "ang": + alpha = obs_setting + kappa = 1 + + if grooming_setting: + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_sd = self.utils.groom(gshop, grooming_setting, jetR).pair() + h.Fill(jet.pt(), fjext.lambda_beta_kappa(jet, jet_sd, alpha, kappa, jetR)) + else: + h.Fill(jet.pt(), fjext.lambda_beta_kappa(jet, alpha, kappa, jetR)) + + # Jet mass histograms + elif obs == "mass": + if grooming_setting: + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_sd = self.utils.groom(gshop, grooming_setting, jetR).pair() + if not jet_sd.has_constituents(): + # Untagged jet -- record underflow value + h.Fill(jet.pt(), -1) + else: + h.Fill(jet.pt(), jet_sd.m()) + + else: + h.Fill(jet.pt(), jet.m()) + + else: + raise ValueError("Observable not implemented in fill_unmatched_histograms") + + #--------------------------------------------------------------- + # Fill matched jet histograms + #--------------------------------------------------------------- + def fill_jet_histograms(self, jetR, jp, jh, jch): + + 
R_label = str(jetR).replace('.', '') + 'Scaled' + if self.level == None: + if jp.pt(): # prevent divide by 0 + getattr(self, 'hJetPtRes_R%s' % R_label).Fill(jp.pt(), (jp.pt() - jch.pt()) / jp.pt()) + getattr(self, 'hResponse_JetPt_R%s' % R_label).Fill(jp.pt(), jch.pt()) + + ''' + if 60 <= jch.pt() < 80: + getattr(self, 'hNconstit_Pt_ch_PtBinCH60-80_R%s' % R_label).Fill( + jch.pt(), len(jch.constituents())) + getattr(self, 'hNconstit_Pt_h_PtBinCH60-80_R%s' % R_label).Fill( + jh.pt(), len(jh.constituents())) + getattr(self, 'hNconstit_Pt_p_PtBinCH60-80_R%s' % R_label).Fill( + jp.pt(), len(jp.constituents())) + ''' + + # Fill observable histograms and response matrices + for alpha in self.alpha_list: + self.fill_RMs(jetR, alpha, jp, jh, jch) + + + #--------------------------------------------------------------- + # Fill jet response matrices + #--------------------------------------------------------------- + def fill_RMs(self, jetR, alpha, jp, jh, jch): + + for obs in self.observable_list: + for i in range(len(self.obs_settings[obs])): + + obs_setting = self.obs_settings[obs][i] + grooming_setting = self.obs_grooming_settings[obs][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + label = ("R%s_%s" % (jetR, obs_label)).replace('.', '') + + jp_sd, jh_sd, jch_sd = None, None, None + if grooming_setting: + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jp_sd = self.utils.groom(gshop_p, grooming_setting, jetR).pair() + gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jh_sd = self.utils.groom(gshop_h, grooming_setting, jetR).pair() + gshop_ch = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) + jch_sd = self.utils.groom(gshop_ch, grooming_setting, jetR).pair() + + obs_p, obs_h, obs_ch = None, None, None + + # Calculate angularities + if obs == "ang": + alpha = obs_setting + kappa = 1 + if grooming_setting: + obs_p = fjext.lambda_beta_kappa(jp, jp_sd, alpha, kappa, jetR) + obs_h = 
fjext.lambda_beta_kappa(jh, jh_sd, alpha, kappa, jetR) + obs_ch = fjext.lambda_beta_kappa(jch, jch_sd, alpha, kappa, jetR) + else: + obs_p = fjext.lambda_beta_kappa(jp, alpha, kappa, jetR) + obs_h = fjext.lambda_beta_kappa(jh, alpha, kappa, jetR) + obs_ch = fjext.lambda_beta_kappa(jch, alpha, kappa, jetR) + + # Jet mass histograms + elif obs == "mass": + if grooming_setting: + # Untagged jets -- record underflow value + obs_p = jp_sd.m() if jp_sd.has_constituents() else -1 + obs_h = jh_sd.m() if jh_sd.has_constituents() else -1 + obs_ch = jch_sd.m() if jch_sd.has_constituents() else -1 + + else: + obs_p = jp.m() + obs_h = jh.m() + obs_ch = jch.m() + + else: + raise ValueError("Observable not implemented in fill_unmatched_histograms") + + + for level in ['p', 'h', 'ch']: + if self.level in [None, level]: + getattr(self, 'h_%s_JetPt_%s_%sScaled' % (obs, level, label)).Fill(jch.pt(), locals()['obs_'+level]) + + if self.level == None: + getattr(self, 'hResponse_%s_%sScaled' % (obs, label)).Fill(obs_p, obs_ch) + + ''' + # Lambda at p-vs-ch-level for various bins in ch jet pT + if 20 <= jch.pt() < 40: + getattr(self, 'hResponse_%s_PtBinCH20-40_%sScaled' % (obs, label)).Fill(lp, lch) + elif 40 <= jch.pt() < 60: + getattr(self, 'hResponse_%s_PtBinCH40-60_%sScaled' % (obs, label)).Fill(lp, lch) + elif 60 <= jch.pt() < 80: + getattr(self, 'hResponse_%s_PtBinCH60-80_%sScaled' % (obs, label)).Fill(lp, lch) + + # Phase space plots and annulus histograms, including those binned in ch jet pT + num_r = self.annulus_plots_num_r + ang_per_r_ch = [0] * num_r + for particle in jch.constituents(): + deltaR = particle.delta_R(jch) + getattr(self, 'hPhaseSpace_DeltaR_Pt_ch_%sScaled' % (obs, label)).Fill( + particle.pt(), deltaR / jetR) + + lambda_i = lambda_beta_kappa_i(particle, jch, jetR, alpha, 1) + getattr(self, 'hPhaseSpace_%s_DeltaR_ch_%sScaled' % (obs, label)).Fill(deltaR / jetR, lambda_i) + getattr(self, 'hPhaseSpace_%s_Pt_ch_%sScaled' % (obs, label)).Fill(particle.pt(), 
lambda_i) + + if 60 <= jch.pt() < 80: + getattr(self, 'hPhaseSpace_DeltaR_Pt_ch_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + particle.pt(), deltaR / jetR) + getattr(self, 'hPhaseSpace_%s_DeltaR_ch_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + deltaR / jetR, lambda_i) + getattr(self, 'hPhaseSpace_%s_Pt_ch_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + particle.pt(), lambda_i) + + ang_per_r_ch = [ang_per_r_ch[i] + lambda_i * + (deltaR <= ((i+1) * jetR * self.annulus_plots_max_x / num_r)) + for i in range(0, num_r, 1)] + + ang_per_r_p = [0] * num_r + for particle in jp.constituents(): + deltaR = particle.delta_R(jp) + getattr(self, 'hPhaseSpace_DeltaR_Pt_p_%sScaled' % (obs, label)).Fill( + particle.pt(), deltaR / jetR) + + lambda_i = lambda_beta_kappa_i(particle, jp, jetR, alpha, 1) + getattr(self, 'hPhaseSpace_%s_DeltaR_p_%sScaled' % (obs, label)).Fill(deltaR / jetR, lambda_i) + getattr(self, 'hPhaseSpace_%s_Pt_p_%sScaled' % (obs, label)).Fill(particle.pt(), lambda_i) + + if 60 <= jch.pt() < 80: + getattr(self, 'hPhaseSpace_DeltaR_Pt_p_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + particle.pt(), deltaR / jetR) + getattr(self, 'hPhaseSpace_%s_DeltaR_p_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + deltaR / jetR, lambda_i) + getattr(self, 'hPhaseSpace_%s_Pt_p_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + particle.pt(), lambda_i) + + ang_per_r_p = [ang_per_r_p[i] + lambda_i * + (deltaR <= ((i+1) * jetR * self.annulus_plots_max_x / num_r)) + for i in range(0, num_r, 1)] + + for i in range(0, num_r, 1): + getattr(self, 'hAnnulus_%s_p_%sScaled' % (obs, label)).Fill( + (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_p[i] / (lp + 1e-11)) + getattr(self, 'hAnnulus_%s_ch_%sScaled' % (obs, label)).Fill( + (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_ch[i] / (lch + 1e-11)) + if 60 <= jch.pt() < 80: + getattr(self, 'hAnnulus_%s_p_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_p[i] / (lp + 1e-11)) + getattr(self, 
'hAnnulus_%s_ch_PtBinCH60-80_%sScaled' % (obs, label)).Fill( + (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_ch[i] / (lch + 1e-11)) + ''' + + # Residual plots (with and without divisor in y-axis) + getattr(self, "h_%sDiff_JetPt_%sScaled" % (obs, label)).Fill(jch.pt(), obs_p - obs_ch) + if obs_p: # prevent divide by 0 + getattr(self, "h_%sResidual_JetPt_%sScaled" % (obs, label)).Fill(jp.pt(), (obs_p - obs_ch) / obs_p) + + # 4D response matrices for "forward folding" to ch level + x = ([jch.pt(), jp.pt(), obs_ch, obs_p]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_ch_%sScaled' % (obs, label)).Fill(x_array) + + x = ([jh.pt(), jp.pt(), obs_h, obs_p]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_h_%sScaled' % (obs, label)).Fill(x_array) + + #--------------------------------------------------------------- + # Fill jet histograms for MPI (which are just the H-->CH RMs) + #--------------------------------------------------------------- + def fill_jet_histograms_MPI(self, jetR, jp, jh, jch): + + for obs in self.observable_list: + for i in range(len(self.obs_settings[obs])): + + obs_setting = self.obs_settings[obs][i] + grooming_setting = self.obs_grooming_settings[obs][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + jp_sd, jh_sd, jch_sd = None, None, None + if grooming_setting: + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jp_sd = self.utils.groom(gshop_p, grooming_setting, jetR).pair() + gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jh_sd = self.utils.groom(gshop_h, grooming_setting, jetR).pair() + gshop_ch = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) + jch_sd = self.utils.groom(gshop_ch, grooming_setting, jetR).pair() + + obs_p, obs_h, obs_ch = None, None, None + + # Calculate angularities + if obs == "ang": + alpha = obs_setting + kappa = 1 + if grooming_setting: + obs_p = fjext.lambda_beta_kappa(jp, jp_sd, alpha, kappa, jetR) + 
obs_h = fjext.lambda_beta_kappa(jh, jh_sd, alpha, kappa, jetR) + obs_ch = fjext.lambda_beta_kappa(jch, jch_sd, alpha, kappa, jetR) + else: + obs_p = fjext.lambda_beta_kappa(jp, alpha, kappa, jetR) + obs_h = fjext.lambda_beta_kappa(jh, alpha, kappa, jetR) + obs_ch = fjext.lambda_beta_kappa(jch, alpha, kappa, jetR) + + # Jet mass histograms + elif obs == "mass": + if grooming_setting: + # Untagged jets -- record underflow value + obs_p = jp_sd.m() if jp_sd.has_constituents() else -1 + obs_h = jh_sd.m() if jh_sd.has_constituents() else -1 + obs_ch = jch_sd.m() if jch_sd.has_constituents() else -1 + + else: + obs_p = jp.m() + obs_h = jh.m() + obs_ch = jch.m() + + else: + raise ValueError("Observable not implemented in fill_unmatched_histograms") + + # 4D response matrices for "forward folding" from h to ch level + x = ([jch.pt(), jh.pt(), obs_ch, obs_h]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_Fnp_%sScaled' % (obs, label)).Fill(x_array) + + #--------------------------------------------------------------- + # Initiate scaling of all histograms and print final simulation info + #--------------------------------------------------------------- + def scale_print_final_info(self, pythia, pythia_MPI): + + # Scale all jet histograms by the appropriate factor from generated cross section + # and the number of accepted events + if not self.no_scale: + scale_f = pythia.info.sigmaGen() / self.hNevents.GetBinContent(1) + print("Weight MPIoff histograms by (cross section)/(N events) =", scale_f) + MPI_scale_f = pythia_MPI.info.sigmaGen() / self.hNeventsMPI.GetBinContent(1) + print("Weight MPIon histograms by (cross section)/(N events) =", MPI_scale_f) + self.scale_jet_histograms(scale_f, MPI_scale_f) + print() + + print("N total final MPI-off events:", int(self.hNevents.GetBinContent(1)), "with", + int(pythia.info.nAccepted() - self.hNevents.GetBinContent(1)), + "events rejected at hadronization step") + self.hNevents.SetBinError(1, 0) + + for jetR in 
self.jetR_list: + jetR_str = str(jetR).replace('.', '') + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + print(("For R=%s: %i jets cut at first match criteria; " + \ + "%i jets cut at second match criteria.") % + (str(jetR), count1, count2)) + print() + + + #--------------------------------------------------------------- + # Scale all jet histograms by sigma/N + #--------------------------------------------------------------- + def scale_jet_histograms(self, scale_f, MPI_scale_f): + + for jetR in self.jetR_list: + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + for h in getattr(self, hist_list_name): + h.Scale(scale_f) + + hist_list_MPIon_name = "hist_list_MPIon_R%s" % str(jetR).replace('.', '') + for h in getattr(self, hist_list_MPIon_name): + h.Scale(MPI_scale_f) + + +################################################################ +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly', + prog=os.path.basename(__file__)) + pyconf.add_standard_pythia_args(parser) + # Could use --py-seed + parser.add_argument('--user-seed', help='PYTHIA starting seed', default=1111, type=int) + parser.add_argument('-o', '--output-dir', action='store', type=str, default='./', + help='Output directory for generated ROOT file(s)') + parser.add_argument('--tree-output-fname', default="AnalysisResults.root", type=str, + help="Filename for the (unscaled) generated particle ROOT TTree") + parser.add_argument('--no-tree', default=False, action='store_true', + help="Do not save tree of particle information, only create histograms") + parser.add_argument('--no-match-level', help="Save simulation for only one level with " + \ + "no matching. 
Options: 'p', 'h', 'ch'", default=None, type=str) + parser.add_argument('--no-scale', help="Turn off rescaling all histograms by cross section / N", + action='store_true', default=False) + parser.add_argument('-c', '--config_file', action='store', type=str, default='config/angularity.yaml', + help="Path of config file for observable configurations") + args = parser.parse_args() + + if args.no_match_level not in [None, 'p', 'h', 'ch']: + print("ERROR: Unrecognized type %s. Please use 'p', 'h', or 'ch'" % args.type_only) + exit(1) + + # If invalid configFile is given, exit + if not os.path.exists(args.config_file): + print('File \"{0}\" does not exist! Exiting!'.format(args.configFile)) + sys.exit(0) + + # Use PYTHIA seed for event generation + if args.user_seed < 0: + args.user_seed = 1111 + + # Have at least 1 event + if args.nev < 1: + args.nev = 1 + + if args.py_noMPI: + print("\033[91m%s\033[00m" % "WARNING: py-noMPI flag ignored for this program") + time.sleep(3) + print() + + process = pythia_parton_hadron(config_file=args.config_file, output_dir=args.output_dir, args=args) + process.pythia_parton_hadron(args) diff --git a/pyjetty/alice_analysis/process/user/ang/pythia_quark_gluon_ang.py b/pyjetty/alice_analysis/process/user/ang/pythia_quark_gluon_ang.py new file mode 100755 index 000000000..5ee81b7be --- /dev/null +++ b/pyjetty/alice_analysis/process/user/ang/pythia_quark_gluon_ang.py @@ -0,0 +1,441 @@ +#!/usr/bin/env python +''' +Script for looking at the quark vs gluon dependence of substructure observables +Author: Ezra Lesser (elesser@berkeley.edu) +''' + +from __future__ import print_function + +# Fastjet via python (from external library heppy) +import fastjet as fj +import fjcontrib +import fjext + +import ROOT + +import tqdm +import yaml +import copy +import argparse +import os +import array +import numpy as np + +from pyjetty.mputils import * + +from heppy.pythiautils import configuration as pyconf +import pythia8 +import pythiafjext +import 
pythiaext + +from pyjetty.alice_analysis.process.base import process_base + +# Prevent ROOT from stealing focus when plotting +ROOT.gROOT.SetBatch(True) +# Automatically set Sumw2 when creating new histograms +ROOT.TH1.SetDefaultSumw2() +ROOT.TH2.SetDefaultSumw2() + +################################################################ +class PythiaQuarkGluon(process_base.ProcessBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, args=None, **kwargs): + + super(PythiaQuarkGluon, self).__init__( + input_file, config_file, output_dir, debug_level, **kwargs) + + # Call base class initialization + process_base.ProcessBase.initialize_config(self) + + # Read config file + with open(self.config_file, 'r') as stream: + config = yaml.safe_load(stream) + + if not os.path.exists(self.output_dir): + os.makedirs(self.output_dir) + + self.jetR_list = config["jetR"] + + self.user_seed = args.user_seed + self.nev = args.nev + + self.noMPI = (bool)(1-args.MPIon) + self.noISR = (bool)(1-args.ISRon) + + # PDG ID values for quarks and gluons + self.quark_pdg_ids = [1, 2, 3, 4, 5, 6, 7, 8] + self.charm_pdg_ids = [4] + self.gluon_pdg_ids = [9, 21] + + # hadron level - ALICE tracking restriction + self.max_eta_hadron = 0.9 + + self.min_leading_track_pT = config["min_leading_track_pT"] if "min_leading_track_pT" in config else None + + self.pt_bins = array.array('d', list(range(5, 100, 5)) + list(range(100, 210, 10))) + self.obs_bins_ang = np.concatenate((np.linspace(0, 0.009, 10), np.linspace(0.01, 0.1, 19), + np.linspace(0.11, 0.8, 70))) + self.obs_bins_mass = np.concatenate( + (np.linspace(0, 0.9, 10), np.linspace(1, 9.8, 45), np.linspace(10, 14.5, 10), + np.linspace(15, 19, 5), np.linspace(20, 60, 9))) + + self.observable_list = config['process_observables'] + self.obs_settings = {} + 
self.obs_grooming_settings = {} + self.obs_names = {} + for observable in self.observable_list: + + obs_config_dict = config[observable] + obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + self.obs_settings[observable] = self.utils.obs_settings(observable, obs_config_dict, obs_subconfig_list) + self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) + + self.obs_names[observable] = obs_config_dict["common_settings"]["xtitle"] + + #--------------------------------------------------------------- + # Main processing function + #--------------------------------------------------------------- + def pythia_quark_gluon(self, args): + + # Create ROOT TTree file for storing raw PYTHIA particle information + outf_path = os.path.join(self.output_dir, args.tree_output_fname) + outf = ROOT.TFile(outf_path, 'recreate') + outf.cd() + + # Initialize response histograms + self.initialize_hist() + + pinfo('user seed for pythia', self.user_seed) + mycfg = ['Random:setSeed=on', 'Random:seed={}'.format(self.user_seed)] + mycfg.append('HadronLevel:all=off') + + # print the banner first + fj.ClusterSequence.print_banner() + print() + + # ------------------------------- + # Setting MPIs and ISRs + print('Will run no MPI:',self.noMPI) + print('Will run no ISR:',self.noISR) + setattr(args, "py_noMPI", self.noMPI) + setattr(args, "py_noISR", self.noISR) + # ------------------------------- + + pythia = pyconf.create_and_init_pythia_from_args(args, mycfg) + + self.init_jet_tools() + self.calculate_events(pythia) + pythia.stat() + print() + + self.scale_print_final_info(pythia) + + outf.Write() + outf.Close() + + self.save_output_objects() + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_hist(self): + + 
self.hNevents = ROOT.TH1I("hNevents", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + + for jetR in self.jetR_list: + + # Store a list of all the histograms just so that we can rescale them later + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + setattr(self, hist_list_name, []) + + R_label = str(jetR).replace('.', '') + 'Scaled' + + for observable in self.observable_list: + # Should only be two: observable == "ang" or "mass" + if observable != "ang" and observable != "mass": + raise ValueError("Observable %s is not implemented in this script" % observable) + + obs_name = self.obs_names[observable] + obs_bins = getattr(self, "obs_bins_" + observable) + # Use more finely binned pT bins for TH2s than for the RMs + pt_bins = array.array('d', list(range(0, 201, 1))) + + # Loop over subobservable (alpha value) + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + for parton_type in ["quark", "charm", "gluon"]: + + name = ('h_%s_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h_%s_JetPt_%s_R%s' % (observable, parton_type, jetR)) + h = ROOT.TH2F(name, name, len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('#it{p}_{T,%s}^{ch jet}' % (parton_type[0] + "-init")) + h.GetYaxis().SetTitle(obs_name + '^{%s}' % (parton_type[0] + "-init")) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + #--------------------------------------------------------------- + # Initiate jet defs, selectors, and sd (if required) + #--------------------------------------------------------------- + def init_jet_tools(self): + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + # set up our jet definition and a jet selector + jet_def = fj.JetDefinition(fj.antikt_algorithm, jetR) + 
setattr(self, "jet_def_R%s" % jetR_str, jet_def) + print(jet_def) + + pwarning('max eta for particles after hadronization set to', self.max_eta_hadron) + parts_selector_h = fj.SelectorAbsEtaMax(self.max_eta_hadron) + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + jet_selector = fj.SelectorPtMin(5.0) & fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) + #jet_selector = fj.SelectorPtMin(0.) & fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) + setattr(self, "jet_selector_R%s" % jetR_str, jet_selector) + + count1 = 0 # Number of partonic parents which match to >1 ch-jets + setattr(self, "count1_R%s" % jetR_str, count1) + count2 = 0 # Number of partonic parents which match to zero ch-jets + setattr(self, "count2_R%s" % jetR_str, count2) + + #--------------------------------------------------------------- + # Calculate events and pass information on to jet finding + #--------------------------------------------------------------- + def calculate_events(self, pythia): + + iev = 0 # Event loop count + + while iev < self.nev: + if not pythia.next(): + continue + + self.parents = [] + self.event = pythia.event + print(self.event) + fs_parton_5 = fj.PseudoJet(pythia.event[5].px(), pythia.event[5].py(), pythia.event[5].pz(), pythia.event[5].e()) + fs_parton_6 = fj.PseudoJet(pythia.event[6].px(), pythia.event[6].py(), pythia.event[6].pz(), pythia.event[6].e()) + self.parents = [fs_parton_5, fs_parton_6] # parent partons in dijet + + # Save PDG code of the parent partons + self.parent_ids = [pythia.event[5].id(), pythia.event[6].id()] + + # parton level + #parts_pythia_p = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + + hstatus = pythia.forceHadronLevel() + if not hstatus: + continue + + # full-hadron level + #parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + + # charged-hadron level + parts_pythia_hch = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) 
+ + # Some "accepted" events don't survive hadronization step -- keep track here + self.hNevents.Fill(0) + self.find_jets_fill_histograms(parts_pythia_hch, iev) + + iev += 1 + + #--------------------------------------------------------------- + # Find primordial parent + #--------------------------------------------------------------- + def primordial_parent(self,p): + parent1 = parent2 = -10 + while p > 6: + parent1 = self.event[p].mother1() + parent2 = self.event[p].mother2() + if parent1 != parent2: + p = max(parent1,parent2) + else: + p = parent1 + return p + + #--------------------------------------------------------------- + # Find jets, do matching between levels, and fill histograms + #--------------------------------------------------------------- + def find_jets_fill_histograms(self, parts_pythia_hch, iev): + # Loop over jet radii + for jetR in self.jetR_list: + + jetR_str = str(jetR).replace('.', '') + jet_selector = getattr(self, "jet_selector_R%s" % jetR_str) + jet_def = getattr(self, "jet_def_R%s" % jetR_str) + + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + + # Get the jets at different levels + #jets_p = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_p ))) # parton level + #jets_h = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_h ))) # full hadron level + jets_ch = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_hch))) # charged hadron level + + R_label = str(jetR).replace('.', '') + 'Scaled' + + # Find the charged jet closest to the axis of the original parton + # Require that the match is within some small angle, and that it is unique + jet_matching_distance = 0.6 # Match jets with deltaR < jet_matching_distance*jetR + self.parent0match, self.parent1match = None, None + for i_jch, jch in enumerate(jets_ch): + # Do constituent pT cut + if self.min_leading_track_pT and not \ + self.utils.is_truth_jet_accepted(jch, self.min_leading_track_pT): + continue + for i_parent, parent in 
enumerate(self.parents): + parentmatch_name = "parent%imatch" % i_parent + if jch.delta_R(parent) < jet_matching_distance * jetR: + match = getattr(self, parentmatch_name) + if not match: + setattr(self, parentmatch_name, jch) + else: # Already found a match + # Set flag value so that we know to ignore this one + setattr(self, parentmatch_name, 0) + + # If we have matches, fill histograms + for i_parent, parent in enumerate(self.parents): + jet = getattr(self, "parent%imatch" % i_parent) + if not jet: + if jet == 0: + # More than one match -- take note and continue + count1 += 1 + continue + else: # jet == None + # No matches -- take note and continue + count2 += 1 + continue + + # One unique match + # Identify the histograms which need to be filled + parton_id = self.parent_ids[i_parent] + parton_types = [] + if parton_id in self.quark_pdg_ids: + parton_types += ["quark"] + if parton_id in self.charm_pdg_ids: + parton_types += ["charm"] + elif parton_id in self.gluon_pdg_ids: + parton_types += ["gluon"] + + # If parent parton not identified, skip for now + if not len(parton_types): + continue + + # Fill histograms + for observable in self.observable_list: + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + # Groom jet, if applicable + jet_groomed_lund = None + if grooming_setting: + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) + if not jet_groomed_lund: + continue + + obs = self.calculate_observable( + observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet.pt()) + + for parton_type in parton_types: + getattr(self, ('h_%s_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h_%s_JetPt_%s_R%s' % (observable, 
parton_type, jetR))).Fill( + jet.pt(), obs) + + setattr(self, "count1_R%s" % jetR_str, count1) + setattr(self, "count2_R%s" % jetR_str, count2) + + #--------------------------------------------------------------- + # Calculate the observable given a jet + #--------------------------------------------------------------- + def calculate_observable(self, observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + if observable == "ang": + + return fjext.lambda_beta_kappa(jet, jet_groomed_lund.pair(), obs_setting, 1, jetR) \ + if grooming_setting else fjext.lambda_beta_kappa(jet, obs_setting, 1, jetR) + + elif observable == "mass": + + if grooming_setting: + j_groomed = jet_groomed_lund.pair() + if not j_groomed.has_constituents(): + # Untagged jet -- record underflow value + return -1 + else: + return j_groomed.m() + + return jet.m() + + # Should not be any other observable + raise ValueError("Observable %s not implemented" % observable) + + #--------------------------------------------------------------- + # Initiate scaling of all histograms and print final simulation info + #--------------------------------------------------------------- + def scale_print_final_info(self, pythia): + # Scale all jet histograms by the appropriate factor from generated cross section and the number of accepted events + scale_f = pythia.info.sigmaGen() / self.hNevents.GetBinContent(1) + + for jetR in self.jetR_list: + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + for h in getattr(self, hist_list_name): + h.Scale(scale_f) + + print("N total final events:", int(self.hNevents.GetBinContent(1)), "with", + int(pythia.info.nAccepted() - self.hNevents.GetBinContent(1)), + "events rejected at hadronization step") + self.hNevents.SetBinError(1, 0) + +################################################################ +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly', + 
prog=os.path.basename(__file__)) + pyconf.add_standard_pythia_args(parser) + # Could use --py-seed + parser.add_argument('--user-seed', help='PYTHIA starting seed', default=1111, type=int) + parser.add_argument('-o', '--output-dir', action='store', type=str, default='./', + help='Output directory for generated ROOT file(s)') + parser.add_argument('--tree-output-fname', default="AnalysisResults.root", type=str, + help="Filename for the (unscaled) generated particle ROOT TTree") + parser.add_argument('--MPIon', action='store', type=int, default=1, + help="MPI on or off") + parser.add_argument('--ISRon', action='store', type=int, default=1, + help="ISR on or off") + parser.add_argument('-c', '--config_file', action='store', type=str, default='config/angularity.yaml', + help="Path of config file for observable configurations") + + args = parser.parse_args() + + # If invalid configFile is given, exit + if not os.path.exists(args.config_file): + print('File \"{0}\" does not exist! Exiting!'.format(args.configFile)) + sys.exit(0) + + # Use PYTHIA seed for event generation + if args.user_seed < 0: + args.user_seed = 1111 + + # Have at least 1 event + if args.nev < 1: + args.nev = 1 + + process = PythiaQuarkGluon(config_file=args.config_file, output_dir=args.output_dir, args=args) + process.pythia_quark_gluon(args) diff --git a/pyjetty/alice_analysis/process/user/ang_pp/pythia_parton_hadron.py b/pyjetty/alice_analysis/process/user/ang_pp/pythia_parton_hadron.py deleted file mode 100755 index d68363f92..000000000 --- a/pyjetty/alice_analysis/process/user/ang_pp/pythia_parton_hadron.py +++ /dev/null @@ -1,1265 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import fastjet as fj -import fjcontrib -import fjext - -import ROOT - -import tqdm -import yaml -import copy -import argparse -import os - -from pyjetty.mputils import * - -from heppy.pythiautils import configuration as pyconf -import pythia8 -import pythiafjext -import pythiaext - -from 
pyjetty.alice_analysis.process.base import process_base -from pyjetty.alice_analysis.process.user.ang_pp.helpers import lambda_beta_kappa_i - -from array import array -import numpy as np - -# Prevent ROOT from stealing focus when plotting -ROOT.gROOT.SetBatch(True) -# Automatically set Sumw2 when creating new histograms -ROOT.TH1.SetDefaultSumw2() - -################################################################ -class pythia_parton_hadron(process_base.ProcessBase): - - #--------------------------------------------------------------- - # Constructor - #--------------------------------------------------------------- - def __init__(self, input_file='', config_file='', output_dir='', - debug_level=0, args=None, **kwargs): - - super(pythia_parton_hadron, self).__init__( - input_file, config_file, output_dir, debug_level, **kwargs) - - self.initialize_config(args) - - #--------------------------------------------------------------- - # Main processing function - #--------------------------------------------------------------- - def pythia_parton_hadron(self, args): - - # Create ROOT TTree file for storing raw PYTHIA particle information - outf_path = os.path.join(self.output_dir, args.tree_output_fname) - outf = ROOT.TFile(outf_path, 'recreate') - outf.cd() - - # Initialize response histograms - self.initialize_hist() - - pinfo('user seed for pythia', self.user_seed) - # mycfg = ['PhaseSpace:pThatMin = 100'] - mycfg = ['Random:setSeed=on', 'Random:seed={}'.format(self.user_seed)] - mycfg.append('HadronLevel:all=off') - - # PYTHIA instance with MPI off - setattr(args, "py_noMPI", True) - pythia = pyconf.create_and_init_pythia_from_args(args, mycfg) - - # print the banner first - fj.ClusterSequence.print_banner() - print() - - self.init_jet_tools() - self.calculate_events(pythia) - pythia.stat() - print() - - # PYTHIA instance with MPI on - setattr(args, "py_noMPI", False) - pythia_MPI = pyconf.create_and_init_pythia_from_args(args, mycfg) - 
self.calculate_events(pythia_MPI, MPIon=True) - print() - - if not self.no_tree: - for jetR in self.jetR_list: - getattr(self, "tw_R%s" % str(jetR).replace('.', '')).fill_tree() - - self.scale_print_final_info(pythia, pythia_MPI) - - outf.Write() - outf.Close() - - self.save_output_objects() - - #--------------------------------------------------------------- - # Initialize config file into class members - #--------------------------------------------------------------- - def initialize_config(self, args): - - # Call base class initialization - process_base.ProcessBase.initialize_config(self) - - # Read config file - with open(self.config_file, 'r') as stream: - config = yaml.safe_load(stream) - - if not os.path.exists(self.output_dir): - os.makedirs(self.output_dir) - - # Defaults to None if not in use - self.level = args.no_match_level - - self.jetR_list = config["jetR"] - self.beta_list = config["betas"] - - # SoftDrop parameters - self.use_SD = True # Change this to use SD - self.sd_beta = config["sd_beta"] - self.sd_zcut = config["sd_zcut"] - self.grooming_settings = [{'sd': [self.sd_zcut, self.sd_beta]}] # self.utils.grooming_settings - self.grooming_labels = [self.utils.grooming_label(gs) for gs in self.grooming_settings] - - self.user_seed = args.user_seed - self.nev = args.nev - - self.n_pt_bins = config["n_pt_bins"] - self.pt_limits = config["pt_limits"] - self.n_lambda_bins = config['n_lambda_bins'] - self.lambda_limits = config['lambda_limits'] - - # Manually added binnings for RM and scaling histograms - self.pt_bins = array('d', list(range(10, 50, 5)) + list(range(50, 210, 10))) - self.obs_bins = np.concatenate((np.linspace(0, 0.0009, 10), np.linspace(0.001, 0.009, 9), - np.linspace(0.01, 0.1, 19), np.linspace(0.11, 1., 90))) - - # hadron level - ALICE tracking restriction - self.max_eta_hadron = 0.9 - - # Whether or not to rescale final jet histograms based on sigma/N - self.no_scale = args.no_scale - - # Whether or not to save particle info in raw 
tree structure - self.no_tree = args.no_tree - - - #--------------------------------------------------------------- - # Initialize histograms - #--------------------------------------------------------------- - def initialize_hist(self): - - self.hNevents = ROOT.TH1I("hNevents", 'Number accepted events (unscaled)', 2, -0.5, 1.5) - self.hNeventsMPI = ROOT.TH1I("hNeventsMPI", 'Number accepted events (unscaled)', 2, -0.5, 1.5) - - for jetR in self.jetR_list: - - # Store a list of all the histograms just so that we can rescale them later - hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') - setattr(self, hist_list_name, []) - hist_list_name_MPIon = "hist_list_MPIon_R%s" % str(jetR).replace('.', '') - setattr(self, hist_list_name_MPIon, []) - - R_label = str(jetR).replace('.', '') + 'Scaled' - - if self.level in [None, 'ch']: - name = 'hJetPt_ch_R%s' % R_label - h = ROOT.TH1F(name, name+';p_{T}^{ch jet};#frac{dN}{dp_{T}^{ch jet}};', 300, 0, 300) - h.Sumw2() # enables calculation of errors - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hNconstit_Pt_ch_R%s' % R_label - h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) - h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') - h.GetYaxis().SetTitle('#it{N}_{constit}^{ch jet}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.level in [None, 'h']: - name = 'hJetPt_h_R%s' % R_label - h = ROOT.TH1F(name, name+';p_{T}^{jet, h};#frac{dN}{dp_{T}^{jet, h}};', 300, 0, 300) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hNconstit_Pt_h_R%s' % R_label - h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) - h.GetXaxis().SetTitle('#it{p}_{T}^{h jet}') - h.GetYaxis().SetTitle('#it{N}_{constit}^{h jet}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.level in [None, 'p']: - name = 'hJetPt_p_R%s' % R_label - h = ROOT.TH1F(name, name+';p_{T}^{jet, 
parton};#frac{dN}{dp_{T}^{jet, parton}};', - 300, 0, 300) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hNconstit_Pt_p_R%s' % R_label - h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) - h.GetXaxis().SetTitle('#it{p}_{T}^{p jet}') - h.GetYaxis().SetTitle('#it{N}_{constit}^{p jet}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.level == None: - name = 'hJetPtRes_R%s' % R_label - h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 1.) - h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') - h.GetYaxis().SetTitle( - '#frac{#it{p}_{T}^{parton jet}-#it{p}_{T}^{ch jet}}{#it{p}_{T}^{parton jet}}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hResponse_JetPt_R%s' % R_label - h = ROOT.TH2F(name, name, 200, 0, 200, 200, 0, 200) - h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') - h.GetYaxis().SetTitle('#it{p}_{T}^{ch jet}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - ''' - # Jet multiplicity for matched jets with a cut at ch-jet level - name = 'hNconstit_Pt_ch_PtBinCH60-80_R%s' % R_label - h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) - h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') - h.GetYaxis().SetTitle('#it{N}_{constit}^{ch jet}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hNconstit_Pt_h_PtBinCH60-80_R%s' % R_label - h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) - h.GetXaxis().SetTitle('#it{p}_{T}^{h jet}') - h.GetYaxis().SetTitle('#it{N}_{constit}^{h jet}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hNconstit_Pt_p_PtBinCH60-80_R%s' % R_label - h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) - h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') - h.GetYaxis().SetTitle('#it{N}_{constit}^{parton jet}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - ''' - - 
for beta in self.beta_list: - - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') - - if self.level in [None, 'ch']: - name = 'hAng_JetPt_ch_%sScaled' % label - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hAng_JetPt_ch_MPIon_%sScaled' % label - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name_MPIon).append(h) - - if self.use_SD: - # SoftDrop groomed jet histograms for MPI scaling - for gl in self.grooming_labels: - name = 'hAng_JetPt_ch_%s_%sScaled' % (label, gl) - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hAng_JetPt_ch_MPIon_%s_%sScaled' % (label, gl) - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{ch jet}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{ch}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name_MPIon).append(h) - - if self.level in [None, 'h']: - name = 'hAng_JetPt_h_%sScaled' % label - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{jet, h}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{h}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, 
hist_list_name).append(h) - - if self.use_SD: - for gl in self.grooming_labels: - name = 'hAng_JetPt_h_%s_%sScaled' % (label, gl) - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{jet, h}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{h}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - - if self.level in [None, 'p']: - name = 'hAng_JetPt_p_%sScaled' % label - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{parton}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.use_SD: - for gl in self.grooming_labels: - name = 'hAng_JetPt_p_%s_%sScaled' % (label, gl) - h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, - len(self.obs_bins)-1, self.obs_bins) - h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - h.GetYaxis().SetTitle('#frac{dN}{d#lambda_{#beta=%s}^{parton}}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.level == None: - name = 'hResponse_ang_%sScaled' % label - h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.use_SD: - for gl in self.grooming_labels: - name = 'hResponse_ang_%s_%sScaled' % (label, gl) - h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - ''' - name = 'hResponse_ang_PtBinCH20-40_%sScaled' % label - h = ROOT.TH2F(name, name, 100, 0, 1, 
100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hResponse_ang_PtBinCH40-60_%sScaled' % label - h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hResponse_ang_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) - h.GetXaxis().SetTitle('#lambda_{#beta=%s}^{parton}' % beta) - h.GetYaxis().SetTitle('#lambda_{#beta=%s}^{ch}' % beta) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - # Phase space plots integrated over all pT bins - name = 'hPhaseSpace_DeltaR_Pt_ch_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - 150, 0, 1.5) - h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') - h.GetYaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_DeltaR_ch_%sScaled' % label - h = ROOT.TH2F(name, name, 150, 0, 1.5, - self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_Pt_ch_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 
'hPhaseSpace_DeltaR_Pt_p_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - 150, 0, 1.5) - h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') - h.GetYaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_DeltaR_p_%sScaled' % label - h = ROOT.TH2F(name, name, 150, 0, 1.5, - self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_Pt_p_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - # Phase space plots binned in ch jet pT - name = 'hPhaseSpace_DeltaR_Pt_ch_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - 150, 0, 1.5) - h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') - h.GetYaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_DeltaR_Pt_p_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - 150, 0, 1.5) - h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') - h.GetYaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_DeltaR_ch_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, 150, 0, 1.5, - self.n_lambda_bins, 
self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(#Delta R_{i})_{ch jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_DeltaR_p_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, 150, 0, 1.5, - self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(#Delta R_{i})_{parton jet} / R') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_Pt_ch_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(p_{T, i})_{ch jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{ch jet}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hPhaseSpace_ang_Pt_p_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, self.n_pt_bins, self.pt_limits[0], self.pt_limits[1], - self.n_lambda_bins, self.lambda_limits[0], self.lambda_limits[1]) - h.GetXaxis().SetTitle('(p_{T, i})_{parton jet}') - h.GetYaxis().SetTitle('(#lambda_{#beta=%s, i})_{parton jet}' % str(beta)) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - # Annulus plots for amount of lambda contained within some r < R - self.annulus_plots_num_r = 150 - self.annulus_plots_max_x = 1.5 - low_bound = self.annulus_plots_max_x / self.annulus_plots_num_r / 2. - up_bound = self.annulus_plots_max_x + low_bound - - name = 'hAnnulus_ang_ch_%sScaled' % label - h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, - 100, 0, 1.) 
- h.GetXaxis().SetTitle('(#it{r} / #it{R})_{ch jet}') - h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{ch jet}') % (str(beta), str(beta))) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hAnnulus_ang_ch_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, - 100, 0, 1.) - h.GetXaxis().SetTitle('(#it{r} / #it{R})_{ch jet}') - h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{ch jet}') % (str(beta), str(beta))) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hAnnulus_ang_p_%sScaled' % label - h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, - 100, 0, 1.) - h.GetXaxis().SetTitle('(#it{r} / #it{R})_{parton jet}') - h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{parton jet}') % (str(beta), str(beta))) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = 'hAnnulus_ang_p_PtBinCH60-80_%sScaled' % label - h = ROOT.TH2F(name, name, self.annulus_plots_num_r, low_bound, up_bound, - 100, 0, 1.) - h.GetXaxis().SetTitle('(#it{r} / #it{R})_{parton jet}') - h.GetYaxis().SetTitle( - ('(#frac{#lambda_{#beta=%s}(#it{r})}' + \ - '{#lambda_{#beta=%s}(#it{R})})_{parton jet}') % (str(beta), str(beta))) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - ''' - - name = "hAngResidual_JetPt_%sScaled" % label - h = ROOT.TH2F(name, name, 300, 0, 300, 200, -3., 1.) - h.GetXaxis().SetTitle('p_{T}^{jet, parton}') - h.GetYaxis().SetTitle('#frac{#lambda_{#beta}^{jet, parton}-#lambda_{#beta}' + \ - '^{ch jet}}{#lambda_{#beta}^{jet, parton}}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - name = "hAngDiff_JetPt_%sScaled" % label - h = ROOT.TH2F(name, name, 300, 0, 300, 200, -2., 2.) 
- h.GetXaxis().SetTitle('#it{p}_{T}^{jet, ch}') - h.GetYaxis().SetTitle('#it{#lambda}_{#it{#beta}}^{jet, parton}-' + \ - '#it{#lambda}_{#it{#beta}}^{jet, ch}') - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - # Create THn of response - dim = 4 - title = ['p_{T}^{ch jet}', 'p_{T}^{parton jet}', - '#lambda_{#beta}^{ch}', '#lambda_{#beta}^{parton}'] - nbins = [len(self.pt_bins)-1, len(self.pt_bins)-1, - len(self.obs_bins)-1, len(self.obs_bins)-1] - min_li = [self.pt_bins[0], self.pt_bins[0], - self.obs_bins[0], self.obs_bins[0] ] - max_li = [self.pt_bins[-1], self.pt_bins[-1], - self.obs_bins[-1], self.obs_bins[-1] ] - - name = 'hResponse_JetPt_ang_ch_%sScaled' % label - nbins = (nbins) - xmin = (min_li) - xmax = (max_li) - nbins_array = array('i', nbins) - xmin_array = array('d', xmin) - xmax_array = array('d', xmax) - h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) - for i in range(0, dim): - h.GetAxis(i).SetTitle(title[i]) - if i == 0 or i == 1: - h.SetBinEdges(i, self.pt_bins) - else: # i == 2 or i == 3 - h.SetBinEdges(i, self.obs_bins) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.use_SD: - # SoftDrop groomed jet response matrices - for gl in self.grooming_labels: - name = 'hResponse_JetPt_ang_ch_%s_%sScaled' % (label, gl) - h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) - for i in range(0, dim): - h.GetAxis(i).SetTitle(title[i]) - if i == 0 or i == 1: - h.SetBinEdges(i, self.pt_bins) - else: # i == 2 or i == 3 - h.SetBinEdges(i, self.obs_bins) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - # Another set of THn for full hadron folding - title = ['p_{T}^{h jet}', 'p_{T}^{parton jet}', - '#lambda_{#beta}^{h}', '#lambda_{#beta}^{parton}'] - - name = 'hResponse_JetPt_ang_h_%sScaled' % label - h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) - for i in range(0, dim): - 
h.GetAxis(i).SetTitle(title[i]) - if i == 0 or i == 1: - h.SetBinEdges(i, self.pt_bins) - else: # i == 2 or i == 3 - h.SetBinEdges(i, self.obs_bins) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - if self.use_SD: - # SoftDrop groomed jet response matrices - for gl in self.grooming_labels: - name = 'hResponse_JetPt_ang_h_%s_%sScaled' % (label, gl) - h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) - for i in range(0, dim): - h.GetAxis(i).SetTitle(title[i]) - if i == 0 or i == 1: - h.SetBinEdges(i, self.pt_bins) - else: # i == 2 or i == 3 - h.SetBinEdges(i, self.obs_bins) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name).append(h) - - # Finally, a set of THn for folding H --> CH (with MPI on) - title = ['p_{T}^{ch jet}', 'p_{T}^{h jet}', - '#lambda_{#beta}^{ch}', '#lambda_{#beta}^{h}'] - - name = 'hResponse_JetPt_ang_Fnp_%sScaled' % label - h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) - for i in range(0, dim): - h.GetAxis(i).SetTitle(title[i]) - if i == 0 or i == 1: - h.SetBinEdges(i, self.pt_bins) - else: # i == 2 or i == 3 - h.SetBinEdges(i, self.obs_bins) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name_MPIon).append(h) - - if self.use_SD: - # SoftDrop groomed jet response matrices - for gl in self.grooming_labels: - name = 'hResponse_JetPt_ang_Fnp_%s_%sScaled' % (label, gl) - h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) - for i in range(0, dim): - h.GetAxis(i).SetTitle(title[i]) - if i == 0 or i == 1: - h.SetBinEdges(i, self.pt_bins) - else: # i == 2 or i == 3 - h.SetBinEdges(i, self.obs_bins) - h.Sumw2() - setattr(self, name, h) - getattr(self, hist_list_name_MPIon).append(h) - - - #--------------------------------------------------------------- - # Initiate jet defs, selectors, and sd (if required) - #--------------------------------------------------------------- - def init_jet_tools(self): - - for jetR in self.jetR_list: - 
jetR_str = str(jetR).replace('.', '') - - if not self.no_tree: - # Initialize tree writer - name = 'particle_unscaled_R%s' % jetR_str - t = ROOT.TTree(name, name) - setattr(self, "t_R%s" % jetR_str, t) - tw = RTreeWriter(tree=t) - setattr(self, "tw_R%s" % jetR_str, tw) - - # set up our jet definition and a jet selector - jet_def = fj.JetDefinition(fj.antikt_algorithm, jetR) - setattr(self, "jet_def_R%s" % jetR_str, jet_def) - print(jet_def) - - pwarning('max eta for particles after hadronization set to', self.max_eta_hadron) - parts_selector_h = fj.SelectorAbsEtaMax(self.max_eta_hadron) - - for jetR in self.jetR_list: - jetR_str = str(jetR).replace('.', '') - - jet_selector = fj.SelectorPtMin(5.0) & \ - fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) - setattr(self, "jet_selector_R%s" % jetR_str, jet_selector) - - #max_eta_parton = self.max_eta_hadron + 2. * jetR - #setattr(self, "max_eta_parton_R%s" % jetR_str, max_eta_parton) - #pwarning("Max eta for partons with jet R =", jetR, "set to", max_eta_parton) - #parts_selector_p = fj.SelectorAbsEtaMax(max_eta_parton) - #setattr(self, "parts_selector_p_R%s" % jetR_str, parts_selector_p) - - count1 = 0 # Number of jets rejected from ch-h matching - setattr(self, "count1_R%s" % jetR_str, count1) - count2 = 0 # Number of jets rejected from h-p matching - setattr(self, "count2_R%s" % jetR_str, count2) - - - #--------------------------------------------------------------- - # Calculate events and pass information on to jet finding - #--------------------------------------------------------------- - def calculate_events(self, pythia, MPIon=False): - - iev = 0 # Event loop count - - if MPIon: - hNevents = self.hNeventsMPI - else: - hNevents = self.hNevents - - while hNevents.GetBinContent(1) < self.nev: - if not pythia.next(): - continue - - parts_pythia_p = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) - - hstatus = pythia.forceHadronLevel() - if not hstatus: - #pwarning('forceHadronLevel false event', 
iev) - continue - #parts_pythia_h = pythiafjext.vectorize_select( - # pythia, [pythiafjext.kHadron, pythiafjext.kCharged]) - parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) - - parts_pythia_hch = pythiafjext.vectorize_select( - pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) - - """ TODO: fix for multiple jet R - parts_pythia_p_selected = parts_selector_p(parts_pythia_p) - parts_pythia_h_selected = parts_selector_h(parts_pythia_h) - parts_pythia_hch_selected = parts_selector_h(parts_pythia_hch) - - if self.debug_level > 1: - pinfo('debug partons...') - for p in parts_pythia_p_selected: - pyp = pythiafjext.getPythia8Particle(p) - print(pyp.name()) - pinfo('debug hadrons...') - for p in parts_pythia_h_selected: - pyp = pythiafjext.getPythia8Particle(p) - print(pyp.name()) - pinfo('debug ch. hadrons...') - for p in parts_pythia_hch_selected: - pyp = pythiafjext.getPythia8Particle(p) - print(pyp.name()) - """ - - # Some "accepted" events don't survive hadronization step -- keep track here - hNevents.Fill(0) - self.find_jets_fill_trees(parts_pythia_p, parts_pythia_h, parts_pythia_hch, iev, MPIon) - - iev += 1 - - - #--------------------------------------------------------------- - # Find jets, do matching between levels, and fill histograms & trees - #--------------------------------------------------------------- - def find_jets_fill_trees(self, parts_pythia_p, parts_pythia_h, parts_pythia_hch, - iev, MPIon=False): - - for jetR in self.jetR_list: - jetR_str = str(jetR).replace('.', '') - jet_selector = getattr(self, "jet_selector_R%s" % jetR_str) - jet_def = getattr(self, "jet_def_R%s" % jetR_str) - t = None; tw = None; - if not self.no_tree: - t = getattr(self, "t_R%s" % jetR_str) - tw = getattr(self, "tw_R%s" % jetR_str) - count1 = getattr(self, "count1_R%s" % jetR_str) - count2 = getattr(self, "count2_R%s" % jetR_str) - - # parts = pythiafjext.vectorize(pythia, True, -1, 1, False) - jets_p = 
fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_p))) - jets_h = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_h))) - jets_ch = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_hch))) - - if MPIon: - for jet in jets_ch: - self.fill_MPI_histograms(jetR, jet) - - if self.level and not MPIon: # Only save info at one level w/o matching - if not self.no_tree: - jets = locals()["jets_%s" % self.level] - for jet in jets: - self.fill_unmatched_jet_tree(tw, jetR, iev, jet) - continue - - for i,jchh in enumerate(jets_ch): - - # match hadron (full) jet - drhh_list = [] - for j, jh in enumerate(jets_h): - drhh = jchh.delta_R(jh) - if drhh < jetR / 2.: - drhh_list.append((j,jh)) - if len(drhh_list) != 1: - count1 += 1 - else: # Require unique match - j, jh = drhh_list[0] - - # match parton level jet - dr_list = [] - for k, jp in enumerate(jets_p): - dr = jh.delta_R(jp) - if dr < jetR / 2.: - dr_list.append((k, jp)) - if len(dr_list) != 1: - count2 += 1 - else: - k, jp = dr_list[0] - - if self.debug_level > 0: - pwarning('event', iev) - pinfo('matched jets: ch.h:', jchh.pt(), 'h:', jh.pt(), - 'p:', jp.pt(), 'dr:', dr) - - if not MPIon: - self.fill_jet_histograms(jetR, jp, jh, jchh) - if not self.no_tree: - self.fill_matched_jet_tree(tw, jetR, iev, jp, jh, jchh) - else: - self.fill_jet_histograms_MPI(jetR, jp, jh, jchh) - - #print(" |-> SD jet params z={0:10.3f} dR={1:10.3f} mu={2:10.3f}".format( - # sd_info.z, sd_info.dR, sd_info.mu)) - - if MPIon: - setattr(self, "count1_R%s_MPIon" % jetR_str, count1) - setattr(self, "count2_R%s_MPIon" % jetR_str, count2) - else: - setattr(self, "count1_R%s" % jetR_str, count1) - setattr(self, "count2_R%s" % jetR_str, count2) - - - #--------------------------------------------------------------- - # Fill jet tree with (unscaled/raw) matched parton/hadron tracks - #--------------------------------------------------------------- - def fill_matched_jet_tree(self, tw, jetR, iev, jp, jh, jchh): - - tw.fill_branch('iev', iev) - 
tw.fill_branch('ch', jchh) - tw.fill_branch('h', jh) - tw.fill_branch('p', jp) - - kappa = 1 - for beta in self.beta_list: - label = str(beta).replace('.', '') - tw.fill_branch("l_ch_%s" % label, - fjext.lambda_beta_kappa(jchh, beta, kappa, jetR)) - tw.fill_branch("l_h_%s" % label, - fjext.lambda_beta_kappa(jh, beta, kappa, jetR)) - tw.fill_branch("l_p_%s" % label, - fjext.lambda_beta_kappa(jp, beta, kappa, jetR)) - - # Save SoftDrop variables as well if desired - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - - # SoftDrop jets - gshop_chh = fjcontrib.GroomerShop(jchh, jetR, self.reclustering_algorithm) - jet_sd_chh = self.utils.groom(gshop_chh, gs, jetR).pair() - gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) - jet_sd_h = self.utils.groom(gshop_h, gs, jetR).pair() - gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) - jet_sd_p = self.utils.groom(gshop_p, gs, jetR).pair() - - tw.fill_branch("l_ch_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jchh, jet_sd_chh, beta, kappa, jetR)) - tw.fill_branch("l_h_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jh, jet_sd_h, beta, kappa, jetR)) - tw.fill_branch("l_p_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jp, jet_sd_p, beta, kappa, jetR)) - - - #--------------------------------------------------------------- - # Fill jet tree with (unscaled/raw) unmatched parton/hadron tracks - #--------------------------------------------------------------- - def fill_unmatched_jet_tree(self, tw, jetR, iev, jet): - - tw.fill_branch('iev', iev) - tw.fill_branch(self.level, jet) - - kappa = 1 - for beta in self.beta_list: - label = str(beta).replace('.', '') - tw.fill_branch('l_%s_%s' % (self.level, label), - fjext.lambda_beta_kappa(jet, beta, kappa, jetR)) - - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - - # SoftDrop jets - gshop = fjcontrib.GroomerShop(jet, jetR, 
self.reclustering_algorithm) - jet_sd = self.utils.groom(gshop, gs, jetR).pair() - - tw.fill_branch("l_ch_%s_%s" % (label, gl), fjext.lambda_beta_kappa( - jet, jet_sd, beta, kappa, jetR)) - - - #--------------------------------------------------------------- - # Fill jet histograms for MPI-on PYTHIA run-through - #--------------------------------------------------------------- - def fill_MPI_histograms(self, jetR, jet): - - for beta in self.beta_list: - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') - h = getattr(self, 'hAng_JetPt_ch_MPIon_%sScaled' % label) - - kappa = 1 - h.Fill(jet.pt(), fjext.lambda_beta_kappa(jet, beta, kappa, jetR)) - - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) - jet_sd = self.utils.groom(gshop, gs, jetR).pair() - - getattr(self, 'hAng_JetPt_ch_MPIon_%s_%sScaled' % (label, gl)).Fill( - jet.pt(), fjext.lambda_beta_kappa(jet, jet_sd, beta, kappa, jetR)) - - - #--------------------------------------------------------------- - # Fill jet histograms - #--------------------------------------------------------------- - def fill_jet_histograms(self, jetR, jp, jh, jch): - - R_label = str(jetR).replace('.', '') + 'Scaled' - - # Fill jet histograms which are not dependant on angualrity - if self.level in [None, 'ch']: - getattr(self, 'hJetPt_ch_R%s' % R_label).Fill(jch.pt()) - getattr(self, 'hNconstit_Pt_ch_R%s' % R_label).Fill(jch.pt(), len(jch.constituents())) - if self.level in [None, 'h']: - getattr(self, 'hJetPt_h_R%s' % R_label).Fill(jh.pt()) - getattr(self, 'hNconstit_Pt_h_R%s' % R_label).Fill(jh.pt(), len(jh.constituents())) - if self.level in [None, 'p']: - getattr(self, 'hJetPt_p_R%s' % R_label).Fill(jp.pt()) - getattr(self, 'hNconstit_Pt_p_R%s' % R_label).Fill(jp.pt(), len(jp.constituents())) - - if self.level == None: - if jp.pt(): # prevent divide by 0 - getattr(self, 'hJetPtRes_R%s' % 
R_label).Fill(jp.pt(), (jp.pt() - jch.pt()) / jp.pt()) - getattr(self, 'hResponse_JetPt_R%s' % R_label).Fill(jp.pt(), jch.pt()) - - ''' - if 60 <= jch.pt() < 80: - getattr(self, 'hNconstit_Pt_ch_PtBinCH60-80_R%s' % R_label).Fill( - jch.pt(), len(jch.constituents())) - getattr(self, 'hNconstit_Pt_h_PtBinCH60-80_R%s' % R_label).Fill( - jh.pt(), len(jh.constituents())) - getattr(self, 'hNconstit_Pt_p_PtBinCH60-80_R%s' % R_label).Fill( - jp.pt(), len(jp.constituents())) - ''' - - # Fill angularity histograms and response matrices - for beta in self.beta_list: - self.fill_RMs(jetR, beta, jp, jh, jch) - - - #--------------------------------------------------------------- - # Fill jet histograms - #--------------------------------------------------------------- - def fill_RMs(self, jetR, beta, jp, jh, jch): - - # Calculate angularities - kappa = 1 - lp = fjext.lambda_beta_kappa(jp, beta, kappa, jetR) - lh = fjext.lambda_beta_kappa(jh, beta, kappa, jetR) - lch = fjext.lambda_beta_kappa(jch, beta, kappa, jetR) - - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') - - if self.level in [None, 'ch']: - getattr(self, 'hAng_JetPt_ch_%sScaled' % label).Fill(jch.pt(), lch) - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - gshop = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) - jch_sd = self.utils.groom(gshop, gs, jetR).pair() - getattr(self, 'hAng_JetPt_ch_%s_%sScaled' % (label, gl)).Fill( - jch.pt(), fjext.lambda_beta_kappa(jch, jch_sd, beta, kappa, jetR)) - - if self.level in [None, 'h']: - getattr(self, 'hAng_JetPt_h_%sScaled' % label).Fill(jh.pt(), lh) - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - gshop = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) - jh_sd = self.utils.groom(gshop, gs, jetR).pair() - getattr(self, 'hAng_JetPt_h_%s_%sScaled' % (label, gl)).Fill( - jh.pt(), fjext.lambda_beta_kappa(jh, jh_sd, beta, kappa, jetR)) - - 
if self.level in [None, 'p']: - getattr(self, 'hAng_JetPt_p_%sScaled' % label).Fill(jp.pt(), lp) - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - gshop = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) - jp_sd = self.utils.groom(gshop, gs, jetR).pair() - getattr(self, 'hAng_JetPt_p_%s_%sScaled' % (label, gl)).Fill( - jp.pt(), fjext.lambda_beta_kappa(jp, jp_sd, beta, kappa, jetR)) - - if self.level == None: - getattr(self, 'hResponse_ang_%sScaled' % label).Fill(lp, lch) - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) - jp_sd = self.utils.groom(gshop_p, gs, jetR).pair() - gshop_ch = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) - jch_sd = self.utils.groom(gshop_ch, gs, jetR).pair() - getattr(self, 'hResponse_ang_%s_%sScaled' % (label, gl)).Fill( - fjext.lambda_beta_kappa(jp, jp_sd, beta, kappa, jetR), \ - fjext.lambda_beta_kappa(jch, jch_sd, beta, kappa, jetR)) - - ''' - # Lambda at p-vs-ch-level for various bins in ch jet pT - if 20 <= jch.pt() < 40: - getattr(self, 'hResponse_ang_PtBinCH20-40_%sScaled' % label).Fill(lp, lch) - elif 40 <= jch.pt() < 60: - getattr(self, 'hResponse_ang_PtBinCH40-60_%sScaled' % label).Fill(lp, lch) - elif 60 <= jch.pt() < 80: - getattr(self, 'hResponse_ang_PtBinCH60-80_%sScaled' % label).Fill(lp, lch) - - # Phase space plots and annulus histograms, including those binned in ch jet pT - num_r = self.annulus_plots_num_r - ang_per_r_ch = [0] * num_r - for particle in jch.constituents(): - deltaR = particle.delta_R(jch) - getattr(self, 'hPhaseSpace_DeltaR_Pt_ch_%sScaled' % label).Fill( - particle.pt(), deltaR / jetR) - - lambda_i = lambda_beta_kappa_i(particle, jch, jetR, beta, 1) - getattr(self, 'hPhaseSpace_ang_DeltaR_ch_%sScaled' % label).Fill(deltaR / jetR, lambda_i) - getattr(self, 'hPhaseSpace_ang_Pt_ch_%sScaled' % 
label).Fill(particle.pt(), lambda_i) - - if 60 <= jch.pt() < 80: - getattr(self, 'hPhaseSpace_DeltaR_Pt_ch_PtBinCH60-80_%sScaled' % label).Fill( - particle.pt(), deltaR / jetR) - getattr(self, 'hPhaseSpace_ang_DeltaR_ch_PtBinCH60-80_%sScaled' % label).Fill( - deltaR / jetR, lambda_i) - getattr(self, 'hPhaseSpace_ang_Pt_ch_PtBinCH60-80_%sScaled' % label).Fill( - particle.pt(), lambda_i) - - ang_per_r_ch = [ang_per_r_ch[i] + lambda_i * - (deltaR <= ((i+1) * jetR * self.annulus_plots_max_x / num_r)) - for i in range(0, num_r, 1)] - - ang_per_r_p = [0] * num_r - for particle in jp.constituents(): - deltaR = particle.delta_R(jp) - getattr(self, 'hPhaseSpace_DeltaR_Pt_p_%sScaled' % label).Fill( - particle.pt(), deltaR / jetR) - - lambda_i = lambda_beta_kappa_i(particle, jp, jetR, beta, 1) - getattr(self, 'hPhaseSpace_ang_DeltaR_p_%sScaled' % label).Fill(deltaR / jetR, lambda_i) - getattr(self, 'hPhaseSpace_ang_Pt_p_%sScaled' % label).Fill(particle.pt(), lambda_i) - - if 60 <= jch.pt() < 80: - getattr(self, 'hPhaseSpace_DeltaR_Pt_p_PtBinCH60-80_%sScaled' % label).Fill( - particle.pt(), deltaR / jetR) - getattr(self, 'hPhaseSpace_ang_DeltaR_p_PtBinCH60-80_%sScaled' % label).Fill( - deltaR / jetR, lambda_i) - getattr(self, 'hPhaseSpace_ang_Pt_p_PtBinCH60-80_%sScaled' % label).Fill( - particle.pt(), lambda_i) - - ang_per_r_p = [ang_per_r_p[i] + lambda_i * - (deltaR <= ((i+1) * jetR * self.annulus_plots_max_x / num_r)) - for i in range(0, num_r, 1)] - - for i in range(0, num_r, 1): - getattr(self, 'hAnnulus_ang_p_%sScaled' % label).Fill( - (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_p[i] / (lp + 1e-11)) - getattr(self, 'hAnnulus_ang_ch_%sScaled' % label).Fill( - (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_ch[i] / (lch + 1e-11)) - if 60 <= jch.pt() < 80: - getattr(self, 'hAnnulus_ang_p_PtBinCH60-80_%sScaled' % label).Fill( - (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_p[i] / (lp + 1e-11)) - getattr(self, 'hAnnulus_ang_ch_PtBinCH60-80_%sScaled' % 
label).Fill( - (i+1) * self.annulus_plots_max_x / num_r, ang_per_r_ch[i] / (lch + 1e-11)) - ''' - - # Residual plots (with and without divisor in y-axis) - getattr(self, "hAngDiff_JetPt_%sScaled" % label).Fill(jch.pt(), lp - lch) - if lp: # prevent divide by 0 - getattr(self, "hAngResidual_JetPt_%sScaled" % label).Fill(jp.pt(), (lp - lch) / lp) - - # 4D response matrices for "forward folding" to ch level - x = ([jch.pt(), jp.pt(), lch, lp]) - x_array = array('d', x) - getattr(self, 'hResponse_JetPt_ang_ch_%sScaled' % label).Fill(x_array) - - x = ([jh.pt(), jp.pt(), lh, lp]) - x_array = array('d', x) - getattr(self, 'hResponse_JetPt_ang_h_%sScaled' % label).Fill(x_array) - - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - - # SoftDrop jet angularities - gshop_ch = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) - jet_sd_ch = self.utils.groom(gshop_ch, gs, jetR).pair() - lch_sd = fjext.lambda_beta_kappa(jch, jet_sd_ch, beta, kappa, jetR) - gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) - jet_sd_h = self.utils.groom(gshop_h, gs, jetR).pair() - lh_sd = fjext.lambda_beta_kappa(jh, jet_sd_h, beta, kappa, jetR) - gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) - jet_sd_p = self.utils.groom(gshop_p, gs, jetR).pair() - lp_sd = fjext.lambda_beta_kappa(jp, jet_sd_p, beta, kappa, jetR) - - x = ([jch.pt(), jp.pt(), lch_sd, lp_sd]) - x_array = array('d', x) - getattr(self, 'hResponse_JetPt_ang_ch_%s_%sScaled' % (label, gl)).Fill(x_array) - - x = ([jh.pt(), jp.pt(), lh, lp]) - x_array = array('d', x) - getattr(self, 'hResponse_JetPt_ang_h_%s_%sScaled' % (label, gl)).Fill(x_array) - - - #--------------------------------------------------------------- - # Fill jet histograms for MPI (which are just the H-->CH RMs) - #--------------------------------------------------------------- - def fill_jet_histograms_MPI(self, jetR, jp, jh, jch): - - for beta in self.beta_list: - - # 
Calculate angularities - kappa = 1 - lh = fjext.lambda_beta_kappa(jh, beta, kappa, jetR) - lch = fjext.lambda_beta_kappa(jch, beta, kappa, jetR) - - label = ("R%s_%s" % (str(jetR), str(beta))).replace('.', '') - - # 4D response matrices for "forward folding" from h to ch level - x = ([jch.pt(), jh.pt(), lch, lh]) - x_array = array('d', x) - getattr(self, 'hResponse_JetPt_ang_Fnp_%sScaled' % label).Fill(x_array) - - if self.use_SD: - for i, gs in enumerate(self.grooming_settings): - gl = self.grooming_labels[i] - - # SoftDrop jet angularities - gshop_ch = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) - jet_sd_ch = self.utils.groom(gshop_ch, gs, jetR).pair() - lch_sd = fjext.lambda_beta_kappa(jch, jet_sd_ch, beta, kappa, jetR) - gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) - jet_sd_h = self.utils.groom(gshop_h, gs, jetR).pair() - lh_sd = fjext.lambda_beta_kappa(jh, jet_sd_h, beta, kappa, jetR) - - x = ([jch.pt(), jh.pt(), lch_sd, lh_sd]) - x_array = array('d', x) - getattr(self, 'hResponse_JetPt_ang_Fnp_%s_%sScaled' % (label, gl)).Fill(x_array) - - - #--------------------------------------------------------------- - # Initiate scaling of all histograms and print final simulation info - #--------------------------------------------------------------- - def scale_print_final_info(self, pythia, pythia_MPI): - - # Scale all jet histograms by the appropriate factor from generated cross section - # and the number of accepted events - if not self.no_scale: - scale_f = pythia.info.sigmaGen() / self.hNevents.GetBinContent(1) - print("Weight MPIoff histograms by (cross section)/(N events) =", scale_f) - MPI_scale_f = pythia_MPI.info.sigmaGen() / self.hNeventsMPI.GetBinContent(1) - print("Weight MPIon histograms by (cross section)/(N events) =", MPI_scale_f) - self.scale_jet_histograms(scale_f, MPI_scale_f) - print() - - print("N total final MPI-off events:", int(self.hNevents.GetBinContent(1)), "with", - int(pythia.info.nAccepted() - 
self.hNevents.GetBinContent(1)), - "events rejected at hadronization step") - self.hNevents.SetBinError(1, 0) - - for jetR in self.jetR_list: - jetR_str = str(jetR).replace('.', '') - count1 = getattr(self, "count1_R%s" % jetR_str) - count2 = getattr(self, "count2_R%s" % jetR_str) - print(("For R=%s: %i jets cut at first match criteria; " + \ - "%i jets cut at second match criteria.") % - (str(jetR), count1, count2)) - print() - - - #--------------------------------------------------------------- - # Scale all jet histograms by sigma/N - #--------------------------------------------------------------- - def scale_jet_histograms(self, scale_f, MPI_scale_f): - - for jetR in self.jetR_list: - hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') - for h in getattr(self, hist_list_name): - h.Scale(scale_f) - - hist_list_MPIon_name = "hist_list_MPIon_R%s" % str(jetR).replace('.', '') - for h in getattr(self, hist_list_MPIon_name): - h.Scale(MPI_scale_f) - - -################################################################ -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly', - prog=os.path.basename(__file__)) - pyconf.add_standard_pythia_args(parser) - # Could use --py-seed - parser.add_argument('--user-seed', help='PYTHIA starting seed', default=1111, type=int) - parser.add_argument('-o', '--output-dir', action='store', type=str, default='./', - help='Output directory for generated ROOT file(s)') - parser.add_argument('--tree-output-fname', default="AnalysisResults.root", type=str, - help="Filename for the (unscaled) generated particle ROOT TTree") - parser.add_argument('--no-tree', default=False, action='store_true', - help="Do not save tree of particle information, only create histograms") - parser.add_argument('--no-match-level', help="Save simulation for only one level with " + \ - "no matching. 
Options: 'p', 'h', 'ch'", default=None, type=str) - parser.add_argument('--no-scale', help="Turn off rescaling all histograms by cross section / N", - action='store_true', default=False) - parser.add_argument('-c', '--config_file', action='store', type=str, default='config/angularity.yaml', - help="Path of config file for observable configurations") - args = parser.parse_args() - - if args.no_match_level not in [None, 'p', 'h', 'ch']: - print("ERROR: Unrecognized type %s. Please use 'p', 'h', or 'ch'" % args.type_only) - exit(1) - - # If invalid configFile is given, exit - if not os.path.exists(args.config_file): - print('File \"{0}\" does not exist! Exiting!'.format(args.configFile)) - sys.exit(0) - - # Use PYTHIA seed for event generation - if args.user_seed < 0: - args.user_seed = 1111 - - # Have at least 1 event - if args.nev < 1: - args.nev = 1 - - if args.py_noMPI: - print("\033[91m%s\033[00m" % "WARNING: py-noMPI flag ignored for this program") - time.sleep(3) - print() - - process = pythia_parton_hadron(config_file=args.config_file, output_dir=args.output_dir, args=args) - process.pythia_parton_hadron(args) diff --git a/pyjetty/alice_analysis/process/user/fastsim/eff_smear.py b/pyjetty/alice_analysis/process/user/fastsim/eff_smear.py index 0e0f1533f..f59e24859 100644 --- a/pyjetty/alice_analysis/process/user/fastsim/eff_smear.py +++ b/pyjetty/alice_analysis/process/user/fastsim/eff_smear.py @@ -17,6 +17,8 @@ import pandas as pd import numpy as np +import ROOT + from pyjetty.alice_analysis.process.base import process_io from pyjetty.cstoy import alice_efficiency @@ -37,10 +39,13 @@ class eff_smear: #--------------------------------------------------------------- # Constructor #--------------------------------------------------------------- - def __init__(self, inputFile='', outputDir='', is_jetscape=False): + def __init__(self, inputFile='', outputDir='', is_jetscape=False, is_jewel=False): self.input_file = inputFile self.output_dir = outputDir 
self.is_jetscape = is_jetscape + self.is_jewel = is_jewel + if self.is_jetscape and self.is_jewel: + raise ValueError("Cannot be both JETSCAPE and JEWEL") #--------------------------------------------------------------- # Main processing function @@ -59,7 +64,7 @@ def eff_smear(self): # Build truth-level histogram of track pT multiplicity print("Building truth-level track pT histogram...") - self.hist_list.append( ("truth_pt", self.build_pt_hist()) ) + self.hist_list.append( ("truth_pt", self.build_pt_hist("truth_pt")) ) print('--- {} seconds ---'.format(time.time() - start_time)) # Apply eta cut at the end of the TPC @@ -72,7 +77,7 @@ def eff_smear(self): # Build truth-level histogram of track pT multiplicity after efficiency cuts print("Building truth-level track pT histogram after efficiency cuts...") - self.hist_list.append( ("truth_pt_eff_cuts", self.build_pt_hist()) ) + self.hist_list.append( ("truth_pt_eff_cuts", self.build_pt_hist("truth_pt_eff_cuts")) ) print('--- {} seconds ---'.format(time.time() - start_time)) # Apply pT smearing @@ -81,7 +86,7 @@ def eff_smear(self): # Build truth-level histogram of track pT multiplicity print("Building detector-level track pT histogram...") - self.hist_list.append( ("fastsim_pt", self.build_pt_hist()) ) + self.hist_list.append( ("fastsim_pt", self.build_pt_hist("fastsim_pt")) ) print('--- {} seconds ---'.format(time.time() - start_time)) # ------------------------------------------------------------------------ @@ -89,8 +94,9 @@ def eff_smear(self): # Write data to file print(self.df_fjparticles) print("Writing fast simulation to ROOT TTree...") - self.io.save_dataframe("AnalysisResultsFastSim.root", self.df_fjparticles, - df_true=True, histograms=self.hist_list, is_jetscape=self.is_jetscape) + self.io.save_dataframe( + "AnalysisResultsFastSim.root", self.df_fjparticles, df_true=True, histograms=self.hist_list, + is_jetscape=self.is_jetscape, is_jewel=self.is_jewel) print('--- {} seconds ---'.format(time.time() - 
start_time)) @@ -100,11 +106,10 @@ def eff_smear(self): def init_df(self): # Use IO helper class to convert truth-level ROOT TTree into # a SeriesGroupBy object of fastjet particles per event - self.io = process_io.ProcessIO(input_file=self.input_file, output_dir=self.output_dir, - tree_dir='PWGHF_TreeCreator', - track_tree_name='tree_Particle_gen', - use_ev_id_ext=False, - is_jetscape=self.is_jetscape) + self.io = process_io.ProcessIO( + input_file=self.input_file, output_dir=self.output_dir, tree_dir='PWGHF_TreeCreator', + track_tree_name='tree_Particle_gen', use_ev_id_ext=False, + is_jetscape=self.is_jetscape, is_jewel=self.is_jewel) self.df_fjparticles = self.io.load_dataframe() self.nTracks_truth = len(self.df_fjparticles) print("DataFrame loaded from data.") @@ -125,11 +130,17 @@ def apply_eta_cut(self, df): #--------------------------------------------------------------- # Build histogram of pT values and return it #--------------------------------------------------------------- - def build_pt_hist(self): - bins = np.concatenate((np.arange(0, 0.3, 0.05), np.arange(0.3, 1, 0.1), np.arange(1, 3, 0.2), + def build_pt_hist(self, name): + bins = np.concatenate((np.arange(0, 0.3, 0.05), np.arange(0.3, 1, 0.1), np.arange(1, 3, 0.2), np.arange(3, 10, 0.5), np.arange(10, 20, 1), np.arange(20, 50, 2), np.arange(50, 155, 5))) - return np.histogram(self.df_fjparticles["ParticlePt"], bins=bins) + #return np.histogram(self.df_fjparticles["ParticlePt"], bins=bins) + h = ROOT.TH1F(name, name, len(bins)-1, bins) + h.Sumw2() + h.SetDirectory(0) + for pt in self.df_fjparticles["ParticlePt"]: + h.Fill(pt) + return h #--------------------------------------------------------------- # Apply efficiency cuts @@ -155,6 +166,12 @@ def apply_pt_smear(self, df): "ParticlePhi": df["ParticlePhi"], "z_vtx_reco": df["z_vtx_reco"], "is_ev_rej": df["is_ev_rej"], "status": df["status"]}) + elif self.is_jewel: + df = pd.DataFrame({"run_number": df["run_number"], "ev_id": df["ev_id"], + 
"ParticlePt": smeared_pt, "ParticleEta": df["ParticleEta"], + "ParticlePhi": df["ParticlePhi"], "z_vtx_reco": df["z_vtx_reco"], + "is_ev_rej": df["is_ev_rej"], + "Status": df["Status"]}) else: df = pd.DataFrame({"run_number": df["run_number"], "ev_id": df["ev_id"], "ParticlePt": smeared_pt, "ParticleEta": df["ParticleEta"], @@ -167,8 +184,15 @@ def apply_pt_smear(self, df): pt_bins = np.concatenate((np.arange(0, 1, 0.1), np.arange(1, 10, .5), np.arange(10, 20, 1), np.arange(20, 50, 2), np.arange(50, 95, 5))) dif_bins = np.arange(-0.5, 0.5, .001) - pt_smearing_dists = np.histogram2d(true_pt, pt_dif, bins=[pt_bins, dif_bins]) - self.hist_list.append( ("pt_smearing", pt_smearing_dists) ) + #pt_smearing_dists = np.histogram2d(true_pt, pt_dif, bins=[pt_bins, dif_bins]) + #self.hist_list.append( ("pt_smearing", pt_smearing_dists) ) + + h = ROOT.TH2F("pt_smearing", "pt_smearing", len(pt_bins)-1, pt_bins, len(dif_bins)-1, dif_bins) + h.Sumw2() + h.SetDirectory(0) + for pt, dif in zip(true_pt, pt_dif): + h.Fill(pt, dif) + self.hist_list.append( ("pt_smearing", h) ) return df @@ -181,6 +205,7 @@ def apply_pt_smear(self, df): parser.add_argument("-o", "--outputDir", action="store", type=str, metavar="outputDir", default="./TestOutput", help="Output path for fast sim ROOT TTree") parser.add_argument('--jetscape', action='store_true') + parser.add_argument('--jewel', action='store_true') args = parser.parse_args() print('Configuring...') @@ -193,5 +218,6 @@ def apply_pt_smear(self, df): print('File \"{0}\" does not exist! 
Exiting!'.format(args.inputFile)) sys.exit(0) - processor = eff_smear(inputFile=args.inputFile, outputDir=args.outputDir, is_jetscape=args.jetscape) + processor = eff_smear(inputFile=args.inputFile, outputDir=args.outputDir, + is_jetscape=args.jetscape, is_jewel=args.jewel) processor.eff_smear() diff --git a/pyjetty/alice_analysis/process/user/fastsim/jewel_xsec_parser.py b/pyjetty/alice_analysis/process/user/fastsim/jewel_xsec_parser.py new file mode 100644 index 000000000..b2d8adfcb --- /dev/null +++ b/pyjetty/alice_analysis/process/user/fastsim/jewel_xsec_parser.py @@ -0,0 +1,71 @@ +''' jewel_xsec_parser.py +Recursively iterate through jewel.log files, find cross sections, +sum event numbers, and create scaleFactors.yaml file to use for +pT-hat bin scaling. + +Author: Ezra Lesser (elesser@berkeley.edu), Winter 2022 +''' + +from __future__ import division, print_function +from os import path +import numpy as np + +###################################################################### +### USER-DEFINED CONSTANTS HERE: + +# Input directory where the pT-hat bins are stored +IN_DIR = "/rstorage/generators/jewel_alice/tree_gen/823890/" + +# Output directory where the scaleFactors.yaml files should be created +OUT_DIR = "./" + +# Number of pT-hat bins +N_PTHAT = 20 + +# Number of statistics bins +N_STATS = 400 + +# Multiply all scale factors by a constant so that first bin is 10 +# (Useful to prevent very small scale factors) +scale_to_first_bin = True + + +###################################################################### +###################################################################### + +print("Calculating the scale factors per each pT-hat bin...") + +scale_factors = np.zeros(N_PTHAT) + +# Loop on pT-hat bins (1-N_PTHAT) +for i in range(1, N_PTHAT+1): + print("pT-hat bin %i..." 
% i, end='\r') + N_counts = 0 + sigma = 0 + # Loop over the different statistics for each bin + for j in range(1, N_STATS+1): + filename = path.join(IN_DIR, str(i), str(j), "jewel.log") + with open(filename, 'r') as f: + for line in f: + if line[1:14] == "cross section": + words = [word for word in line.strip().split(' ') if len(word)] + sigma += float(words[-2]) + elif line[1:21] == "sum of event weights": + words = [word for word in line.strip().split(' ') if len(word)] + N_counts += float(words[-1]) + # Average sigma per file + sigma /= N_STATS + # Save sigma / N as the scale factor for this pT-hat bin + scale_factors[i-1] = sigma / N_counts + +if scale_to_first_bin: + val = scale_factors[0] / 10. + scale_factors = [sf/val for sf in scale_factors] + +print("Creating scaleFactors.yaml file in %s" % OUT_DIR) + +with open(path.join(OUT_DIR, "scaleFactors.yaml"), "w") as f: + for i in range(1, N_PTHAT+1): + f.write("%i: %.15e\n" % (i, scale_factors[i-1])) + +print("Done!") diff --git a/pyjetty/alice_analysis/process/user/gen/pythia_parton_hadron.py b/pyjetty/alice_analysis/process/user/gen/pythia_parton_hadron.py index 5ba1169e6..497e1bf7a 100755 --- a/pyjetty/alice_analysis/process/user/gen/pythia_parton_hadron.py +++ b/pyjetty/alice_analysis/process/user/gen/pythia_parton_hadron.py @@ -116,12 +116,14 @@ def calculate_events(self, pythia, MPIon=False): continue # Creates std::vector of final-state hadrons - parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + #parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + parts_pythia_ch = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) # h_is_charged is a std::vector - h_is_charged = pythiafjext.is_charged(pythia, [pythiafjext.kFinal], 0, True) + #h_is_charged = pythiafjext.is_charged(pythia, [pythiafjext.kFinal], 0, True) + h_is_charged = [True for part in parts_pythia_ch] - self.fill_branches(parts_pythia_p, 
parts_pythia_h, h_is_charged, iev, self.user_seed, MPIon) + self.fill_branches(parts_pythia_p, parts_pythia_ch, h_is_charged, iev, self.user_seed, MPIon) # Some "accepted" events don't survive hadronization step -- keep track here iev += 1 diff --git a/pyjetty/alice_analysis/process/user/james/herwig_parton_hadron.py b/pyjetty/alice_analysis/process/user/james/herwig_parton_hadron.py new file mode 100755 index 000000000..4a352ddb6 --- /dev/null +++ b/pyjetty/alice_analysis/process/user/james/herwig_parton_hadron.py @@ -0,0 +1,997 @@ +#!/usr/bin/env python + +from __future__ import print_function + +import fastjet as fj +import fjcontrib +import fjext + +import ROOT + +import tqdm +import yaml +import copy +import argparse +import os + +from pyjetty.mputils import * + +from pyjetty.alice_analysis.process.base import process_base + +from array import array +import numpy as np + +# Prevent ROOT from stealing focus when plotting +ROOT.gROOT.SetBatch(True) +# Automatically set Sumw2 when creating new histograms +ROOT.TH1.SetDefaultSumw2() + +################################################################ +class herwig_parton_hadron(process_base.ProcessBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', + debug_level=0, args=None, **kwargs): + + super(herwig_parton_hadron, self).__init__( + input_file, config_file, output_dir, debug_level, **kwargs) + + self.initialize_config(args) + + + #--------------------------------------------------------------- + # Initialize config file into class members + #--------------------------------------------------------------- + def initialize_config(self, args): + + # Call base class initialization + process_base.ProcessBase.initialize_config(self) + + # Read config file + with open(self.config_file, 'r') as stream: + config = yaml.safe_load(stream) + + if not 
os.path.exists(self.output_dir): + os.makedirs(self.output_dir) + + self.herwig_file = args.input_file + self.herwig_file_MPI = args.input_file_mpi + + # Defaults to None if not in use + self.level = args.no_match_level + + self.jetR_list = config["jetR"] + + # Formatted LaTeX names for plotting + self.obs_names = ["#it{#theta}_{g}", "#it{z}_{g}"] + self.observables = config['process_observables'] + self.obs_settings = {} + self.obs_grooming_settings = {} + for observable in self.observables: + obs_config_dict = config[observable] + obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + self.obs_settings[observable] = self.utils.obs_settings( + observable, obs_config_dict, obs_subconfig_list) + self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) + + # Construct set of unique grooming settings + self.grooming_settings = [] + lists_grooming = [self.obs_grooming_settings[obs] for obs in self.observables] + for observable in lists_grooming: + for setting in observable: + if setting not in self.grooming_settings and setting != None: + self.grooming_settings.append(setting) + self.grooming_labels = [self.utils.grooming_label(gs) for gs in self.grooming_settings] + + # Observable binnings for theta_g and zg + self.obs_bins_theta_g = array('d', np.arange(0, 1.01, 0.01)) + self.obs_bins_zg = array('d', np.arange(0, 0.505, 0.005)) + + # We are not reporting zg theory so save time/memory by skipping these histograms + self.skip_zg = True + if self.skip_zg: + self.obs_names = self.obs_names[:-1] + self.observables = self.observables[:-1] + + # Manually added binnings for RM and scaling histograms + if 'theory_pt_bins' in config: + self.pt_bins = array('d', config['theory_pt_bins']) + + # hadron level - ALICE tracking restriction + self.max_eta_hadron = 0.9 + + # Whether or not to rescale final jet histograms based on 
sigma/N + self.no_scale = args.no_scale + + # Whether or not to save particle info in raw tree structure + self.no_tree = args.no_tree + + # Initialize variables for final cross sections from event generator + self.xsec = None + self.xsec_MPI = None + + + #--------------------------------------------------------------- + # Main processing function + #--------------------------------------------------------------- + def herwig_parton_hadron(self, args): + + # Create ROOT TTree file for storing raw PYTHIA particle information + outf_path = os.path.join(self.output_dir, args.tree_output_fname) + outf = ROOT.TFile(outf_path, 'recreate') + outf.cd() + + # Initialize response histograms + self.initialize_hist() + + # Print the banner first + fj.ClusterSequence.print_banner() + print() + + self.init_jet_tools() + self.parse_events() + if self.herwig_file_MPI: + self.parse_events(MPIon=True) + + if not self.no_tree: + for jetR in self.jetR_list: + getattr(self, "tw_R%s" % str(jetR).replace('.', '')).fill_tree() + + # Scale histograms + self.scale_print_final_info() + + outf.Write() + outf.Close() + + self.save_output_objects() + + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_hist(self): + + self.hNevents = ROOT.TH1I("hNevents", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + self.hNeventsMPI = ROOT.TH1I("hNeventsMPI", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + + for jetR in self.jetR_list: + + # Store a list of all the histograms just so that we can rescale them later + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + setattr(self, hist_list_name, []) + hist_list_name_MPIon = "hist_list_MPIon_R%s" % str(jetR).replace('.', '') + setattr(self, hist_list_name_MPIon, []) + + R_label = str(jetR) + 'Scaled' + + if self.level in [None, 'ch']: + name = 'hJetPt_ch_R%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{ch 
jet};#frac{dN}{dp_{T}^{ch jet}};', 300, 0, 300) + h.Sumw2() # enables calculation of errors + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hNconstit_Pt_ch_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{ch jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level in [None, 'h']: + name = 'hJetPt_h_R%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{jet, h};#frac{dN}{dp_{T}^{jet, h}};', 300, 0, 300) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hNconstit_Pt_h_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{h jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{h jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level in [None, 'p']: + name = 'hJetPt_p_R%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{jet, parton};#frac{dN}{dp_{T}^{jet, parton}};', + 300, 0, 300) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hNconstit_Pt_p_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{p jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{p jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level == None: + name = 'hJetPtRes_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 1.) 
+ h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') + h.GetYaxis().SetTitle( + '#frac{#it{p}_{T}^{parton jet}-#it{p}_{T}^{ch jet}}{#it{p}_{T}^{parton jet}}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hResponse_JetPt_R%s' % R_label + h = ROOT.TH2F(name, name, 200, 0, 200, 200, 0, 200) + h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') + h.GetYaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + for i_obs, obs in enumerate(self.observables): + obs_bins = getattr(self, "obs_bins_%s" % obs) + obs_name = self.obs_names[i_obs] + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + label = "R%s_%s" % (str(jetR), gl) + + if self.level in [None, 'ch']: + name = 'h_JetPt_%s_ch_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{ch jet}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{ch}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'h_JetPt_%s_ch_MPIon_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{ch jet}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{ch}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name_MPIon).append(h) + + if self.level in [None, 'h']: + name = 'h_JetPt_%s_h_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{jet, h}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{h}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level in [None, 'p']: + name = 'h_JetPt_%s_p_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) 
+ h.GetXaxis().SetTitle('p_{T}^{jet, parton}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{parton}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level == None: + name = 'hResponse_%s_p_ch_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) + h.GetXaxis().SetTitle('%s^{parton}' % obs_name) + h.GetYaxis().SetTitle('%s^{ch}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = "hResidual_JetPt_%s_MPIoff_%sScaled" % (obs, label) + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -3., 1.) + h.GetXaxis().SetTitle('p_{T}^{p jet}') + h.GetYaxis().SetTitle('#frac{%s^{p}-%s^{ch}}{%s^{p}}' % (obs_name, obs_name, obs_name)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = "hDiff_JetPt_%s_MPIoff_%sScaled" % (obs, label) + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -2., 2.) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('%s^{p}-%s^{ch}' % (obs_name, obs_name)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Create THn of response + dim = 4 + title = ['p_{T}^{ch jet}', 'p_{T}^{parton jet}', + obs_name + '^{ch}', obs_name + '^{parton}'] + nbins = [len(self.pt_bins)-1, len(self.pt_bins)-1, + len(obs_bins)-1, len(obs_bins)-1] + min_li = [self.pt_bins[0], self.pt_bins[0], + obs_bins[0], obs_bins[0] ] + max_li = [self.pt_bins[-1], self.pt_bins[-1], + obs_bins[-1], obs_bins[-1] ] + + name = 'hResponse_JetPt_%s_p_ch_MPIoff_%sScaled' % (obs, label) + nbins = (nbins) + xmin = (min_li) + xmax = (max_li) + nbins_array = array('i', nbins) + xmin_array = array('d', xmin) + xmax_array = array('d', xmax) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + 
setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Another set of THn for full hadron folding + title = ['p_{T}^{h jet}', 'p_{T}^{parton jet}', + obs_name + '^{h}', obs_name + '^{parton}'] + + name = 'hResponse_JetPt_%s_p_h_MPIoff_%sScaled' % (obs, label) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Finally, a set of THn for folding H --> CH (with MPI on) + title = ['p_{T}^{ch jet}', 'p_{T}^{h jet}', + obs_name + '^{ch}', obs_name + '^{h}'] + + name = 'hResponse_JetPt_%s_h_ch_MPIon_%sScaled' % (obs, label) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name_MPIon).append(h) + + + #--------------------------------------------------------------- + # Initiate jet defs, selectors, and sd (if required) + #--------------------------------------------------------------- + def init_jet_tools(self): + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + if not self.no_tree: + # Initialize tree writer + name = 'particle_unscaled_R%s' % jetR_str + t = ROOT.TTree(name, name) + setattr(self, "t_R%s" % jetR_str, t) + tw = RTreeWriter(tree=t) + setattr(self, "tw_R%s" % jetR_str, tw) + + # set up our jet definition and a jet selector + jet_def = fj.JetDefinition(fj.antikt_algorithm, jetR) + setattr(self, "jet_def_R%s" % jetR_str, jet_def) + print(jet_def) + + pwarning('max eta for particles after hadronization set to', self.max_eta_hadron) + parts_selector_h = fj.SelectorAbsEtaMax(self.max_eta_hadron) + + for 
jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + jet_selector = fj.SelectorPtMin(5.0) & \ + fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) + setattr(self, "jet_selector_R%s" % jetR_str, jet_selector) + + #max_eta_parton = self.max_eta_hadron + 2. * jetR + #setattr(self, "max_eta_parton_R%s" % jetR_str, max_eta_parton) + #pwarning("Max eta for partons with jet R =", jetR, "set to", max_eta_parton) + #parts_selector_p = fj.SelectorAbsEtaMax(max_eta_parton) + #setattr(self, "parts_selector_p_R%s" % jetR_str, parts_selector_p) + + count1 = 0 # Number of jets rejected from ch-h matching + setattr(self, "count1_R%s" % jetR_str, count1) + count2 = 0 # Number of jets rejected from h-p matching + setattr(self, "count2_R%s" % jetR_str, count2) + + #--------------------------------------------------------------- + # Read events from output, find jets, and fill histograms + #--------------------------------------------------------------- + def parse_events(self, MPIon=False): + + if MPIon: + hNevents = self.hNeventsMPI + infile = self.herwig_file_MPI + else: + hNevents = self.hNevents + infile = self.herwig_file + + print("Reading events from %s..." 
% infile) + + with open(infile, 'r') as f: + ev_num = 0 + + # Flags to assist with keeping track of place within file + reading_ev = False + parton = False + parton_final = False + parton_finished = False + hadron = False + hadron_final = False + + partons_px = [] + partons_py = [] + partons_pz = [] + partons_e = [] + #partons_q = [] + hadrons_px = [] + hadrons_py = [] + hadrons_pz = [] + hadrons_e = [] + #hadrons_q = [] + ch_hadrons_px = [] + ch_hadrons_py = [] + ch_hadrons_pz = [] + ch_hadrons_e = [] + #ch_hadrons_q = [] + + for line in f: + + # Waiting to start reading event + if not reading_ev: + if "Event number" in line: + reading_ev = True + ev_num = int(line.split()[2]) + if not ev_num % 1000: + print("Event number", ev_num, end="\r") + hNevents.Fill(0) + elif "Total integrated xsec:" in line: + if MPIon: + self.xsec_MPI = float(line.split()[3]) + else: + self.xsec = float(line.split()[3]) + continue + + # Reading event + # First step is to read the parton info + elif not parton: + if "ShowerHandler" in line: + parton = True + continue + elif not parton_final: + if "final" in line: + parton_final = True + continue + + # Get showered partons + elif not parton_finished: # and not MPIon + # Read parton information + vals = line.split() + if line[0] == '-': + parton_finished = True + elif len(vals) == 5 and line[2] == ' ': + partons_px.append(vals[0]) + partons_py.append(vals[1]) + partons_pz.append(vals[2]) + partons_e.append(vals[3]) + #partons_q.append(vals[4]) + continue + + # Get final hadrons + elif not hadron: + if "DecayHandler" in line: + hadron = True + continue + elif not hadron_final: + if "final" in line: + hadron_final = True + continue + + # Check if event is over + elif line[0] == '-': + # Finished reading hadron info + reading_ev = False + parton = False + parton_final = False + parton_finished = False + hadron = False + hadron_final = False + + # Get correct structure for finding jets + #partons = None + #hadrons = None + #if not MPIon: + 
partons = fjext.vectorize_px_py_pz_e( + partons_px, partons_py, partons_pz, partons_e) + + hadrons = fjext.vectorize_px_py_pz_e( + hadrons_px, hadrons_py, hadrons_pz, hadrons_e) + + ch_hadrons = fjext.vectorize_px_py_pz_e( + ch_hadrons_px, ch_hadrons_py, ch_hadrons_pz, ch_hadrons_e) + + self.find_jets_fill_hist(partons, hadrons, ch_hadrons, ev_num, MPIon) + + partons_px = [] + partons_py = [] + partons_pz = [] + partons_e = [] + #partons_q = [] + hadrons_px = [] + hadrons_py = [] + hadrons_pz = [] + hadrons_e = [] + #hadrons_q = [] + ch_hadrons_px = [] + ch_hadrons_py = [] + ch_hadrons_pz = [] + ch_hadrons_e = [] + #ch_hadrons_q = [] + + continue + + elif line[2].isnumeric(): + # Save the hadron name for charge descrimination + i = 1 + while line[i].isnumeric(): + i += 1 + hadron_type = line[i:].split()[0] + continue + + elif line[2] == ' ': # and len(line.split()) == 5: + # Reading hadron information + vals = line.split() + #if not MPIon: + hadrons_px.append(vals[0]) + hadrons_py.append(vals[1]) + hadrons_pz.append(vals[2]) + hadrons_e.append(vals[3]) + #hadrons_q.append(vals[4]) + + if '+' in hadron_type or '-' in hadron_type: + ch_hadrons_px.append(vals[0]) + ch_hadrons_py.append(vals[1]) + ch_hadrons_pz.append(vals[2]) + ch_hadrons_e.append(vals[3]) + #ch_hadrons_q.append(vals[4]) + + return partons, hadrons, ch_hadrons + + #--------------------------------------------------------------- + # Read events from output, find jets, and fill histograms + #--------------------------------------------------------------- + def find_jets_fill_hist(self, partons, hadrons, + ch_hadrons, iev, MPIon=False): + + for jetR in self.jetR_list: + #print("Filling jet histograms for R = %s..." 
% str(jetR)) + + jetR_str = str(jetR).replace('.', '') + jet_selector = getattr(self, "jet_selector_R%s" % jetR_str) + jet_def = getattr(self, "jet_def_R%s" % jetR_str) + t = None; tw = None; + if not self.no_tree: + t = getattr(self, "t_R%s" % jetR_str) + tw = getattr(self, "tw_R%s" % jetR_str) + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + + #if not (iev+1) % 1000: + # print("Event number %s" % str(iev+1), end='\r') + + try: + jets_ch = fj.sorted_by_pt(jet_selector(jet_def(ch_hadrons))) + jets_p = fj.sorted_by_pt(jet_selector(jet_def(partons))) + jets_h = fj.sorted_by_pt(jet_selector(jet_def(hadrons))) + except ValueError: + print(ch_hadrons, partons, hadrons) + exit() + + if MPIon: + for jet in jets_ch: + self.fill_MPI_histograms(jetR, jet) + + if self.level and not MPIon: # Only save info at one level w/o matching + if not self.no_tree: + jets = locals()["jets_%s" % self.level] + for jet in jets: + self.fill_unmatched_jet_tree(tw, jetR, iev, jet) + continue + + for i,jchh in enumerate(jets_ch): + + # match hadron (full) jet + drhh_list = [] + for j, jh in enumerate(jets_h): + drhh = jchh.delta_R(jh) + if drhh < jetR / 2.: + drhh_list.append((j,jh)) + if len(drhh_list) != 1: + count1 += 1 + else: # Require unique match + j, jh = drhh_list[0] + + # match parton level jet + dr_list = [] + for k, jp in enumerate(jets_p): + dr = jh.delta_R(jp) + if dr < jetR / 2.: + dr_list.append((k, jp)) + if len(dr_list) != 1: + count2 += 1 + else: + k, jp = dr_list[0] + + if self.debug_level > 0: + pwarning('event', iev) + pinfo('matched jets: ch.h:', jchh.pt(), 'h:', jh.pt(), + 'p:', jp.pt(), 'dr:', dr) + + if not MPIon: + self.fill_jet_histograms(jetR, jp, jh, jchh) + if not self.no_tree: + self.fill_matched_jet_tree(tw, jetR, iev, jp, jh, jchh) + else: + self.fill_jet_histograms_MPI(jetR, jp, jh, jchh) + + #print(" |-> SD jet params z={0:10.3f} dR={1:10.3f} mu={2:10.3f}".format( + # sd_info.z, sd_info.dR, sd_info.mu)) + + 
if MPIon: + setattr(self, "count1_R%s_MPIon" % jetR_str, count1) + setattr(self, "count2_R%s_MPIon" % jetR_str, count2) + else: + setattr(self, "count1_R%s" % jetR_str, count1) + setattr(self, "count2_R%s" % jetR_str, count2) + + + #--------------------------------------------------------------- + # Fill jet tree with (unscaled/raw) matched parton/hadron tracks + #--------------------------------------------------------------- + def fill_matched_jet_tree(self, tw, jetR, iev, jp, jh, jchh): + + tw.fill_branch('iev', iev) + tw.fill_branch('ch', jchh) + tw.fill_branch('h', jh) + tw.fill_branch('p', jp) + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + # Groomed jets + gshop_chh = fjcontrib.GroomerShop(jchh, jetR, self.reclustering_algorithm) + jet_ch_groomed_lund = self.utils.groom(gshop_chh, gs, jetR) + if not jet_ch_groomed_lund: + continue + + gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jet_h_groomed_lund = self.utils.groom(gshop_h, gs, jetR) + if not jet_h_groomed_lund: + continue + + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jet_p_groomed_lund = self.utils.groom(gshop_p, gs, jetR) + if not jet_p_groomed_lund: + continue + + obs_dict = None + for obs in self.observables: + if obs == "theta_g": + obs_dict = { + "p" : jet_p_groomed_lund.Delta() / jetR, + "h" : jet_h_groomed_lund.Delta() / jetR, + "ch": jet_ch_groomed_lund.Delta() / jetR } + elif obs == "zg": + if self.skip_zg: + continue + obs_dict = { + "p" : jet_p_groomed_lund.z(), + "h" : jet_h_groomed_lund.z(), + "ch": jet_ch_groomed_lund.z() } + else: + raise ValueError("Unrecognized observable " + obs) + + for level in ["p", "h", "ch"]: + tw.fill_branch("%s_%s_%s" % (obs, level, gl), obs_dict[level]) + + + #--------------------------------------------------------------- + # Fill jet tree with (unscaled/raw) unmatched parton/hadron tracks + #--------------------------------------------------------------- + def 
fill_unmatched_jet_tree(self, tw, jetR, iev, jet): + + tw.fill_branch('iev', iev) + tw.fill_branch(self.level, jet) + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + # Groomed jet + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, gs, jetR) + if not jet_groomed_lund: + continue + + obs_val = None + for obs in self.observables: + if obs == "theta_g": + obs_val = jet_groomed_lund.Delta() / jetR + elif obs == "zg": + if self.skip_zg: + continue + obs_val = jet_groomed_lund.z() + + tw.fill_branch("%s_%s_%s" % (obs, self.level, gl), obs_val) + + + #--------------------------------------------------------------- + # Fill jet histograms for MPI-on PYTHIA run-through + #--------------------------------------------------------------- + def fill_MPI_histograms(self, jetR, jet): + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + label = "R" + str(jetR) + '_' + gl + + # Groomed jet + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, gs, jetR) + if not jet_groomed_lund: + continue + + obs_val = None + for obs in self.observables: + if obs == "theta_g": + obs_val = jet_groomed_lund.Delta() / jetR + elif obs == "zg": + if self.skip_zg: + continue + obs_val = jet_groomed_lund.z() + else: + raise ValueError("Unrecognized observable " + obs) + + getattr(self, 'h_JetPt_%s_ch_MPIon_%sScaled' % (obs, label)).Fill(jet.pt(), obs_val) + + + #--------------------------------------------------------------- + # Fill jet histograms + #--------------------------------------------------------------- + def fill_jet_histograms(self, jetR, jp, jh, jch): + + R_label = str(jetR) + 'Scaled' + + # Fill jet histograms which are not dependent on angularity + if self.level in [None, 'ch']: + getattr(self, 'hJetPt_ch_R%s' % R_label).Fill(jch.pt()) + getattr(self, 'hNconstit_Pt_ch_R%s' % 
R_label).Fill(jch.pt(), len(jch.constituents())) + if self.level in [None, 'h']: + getattr(self, 'hJetPt_h_R%s' % R_label).Fill(jh.pt()) + getattr(self, 'hNconstit_Pt_h_R%s' % R_label).Fill(jh.pt(), len(jh.constituents())) + if self.level in [None, 'p']: + getattr(self, 'hJetPt_p_R%s' % R_label).Fill(jp.pt()) + getattr(self, 'hNconstit_Pt_p_R%s' % R_label).Fill(jp.pt(), len(jp.constituents())) + + if self.level == None: + if jp.pt(): # prevent divide by 0 + getattr(self, 'hJetPtRes_R%s' % R_label).Fill(jp.pt(), (jp.pt() - jch.pt()) / jp.pt()) + getattr(self, 'hResponse_JetPt_R%s' % R_label).Fill(jp.pt(), jch.pt()) + + # Fill angularity histograms and response matrices + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + self.fill_RMs(jetR, gs, gl, jp, jh, jch) + + + #--------------------------------------------------------------- + # Fill jet histograms + #--------------------------------------------------------------- + def fill_RMs(self, jetR, gs, gl, jp, jh, jch): + + # Groomed jets + gshop_chh = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) + jet_ch_groomed_lund = self.utils.groom(gshop_chh, gs, jetR) + if not jet_ch_groomed_lund: + return + + gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jet_h_groomed_lund = self.utils.groom(gshop_h, gs, jetR) + if not jet_h_groomed_lund: + return + + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jet_p_groomed_lund = self.utils.groom(gshop_p, gs, jetR) + if not jet_p_groomed_lund: + return + + label = "R%s_%s" % (jetR, gl) + obs_dict = None + for obs in self.observables: + if obs == "theta_g": + obs_dict = { + "p" : jet_p_groomed_lund.Delta() / jetR, + "h" : jet_h_groomed_lund.Delta() / jetR, + "ch": jet_ch_groomed_lund.Delta() / jetR } + elif obs == "zg": + if self.skip_zg: + continue + obs_dict = { + "p" : jet_p_groomed_lund.z(), + "h" : jet_h_groomed_lund.z(), + "ch": jet_ch_groomed_lund.z() } + else: + raise 
ValueError("Unrecognized observable " + obs) + + if self.level in [None, 'ch']: + getattr(self, 'h_JetPt_%s_ch_MPIoff_%sScaled' % (obs, label)).Fill(jch.pt(), obs_dict['ch']) + + if self.level in [None, 'h']: + getattr(self, 'h_JetPt_%s_h_MPIoff_%sScaled' % (obs, label)).Fill(jh.pt(), obs_dict['h']) + + if self.level in [None, 'p']: + getattr(self, 'h_JetPt_%s_p_MPIoff_%sScaled' % (obs, label)).Fill(jp.pt(), obs_dict['p']) + + if self.level == None: + getattr(self, 'hResponse_%s_p_ch_MPIoff_%sScaled' % (obs, label)).Fill(obs_dict['p'], obs_dict['ch']) + + # Residual plots (with and without divisor in y-axis) + getattr(self, "hDiff_JetPt_%s_MPIoff_%sScaled" % (obs, label)).Fill( + jch.pt(), obs_dict['p'] - obs_dict['ch']) + if obs_dict['p']: # prevent divide by 0 + getattr(self, "hResidual_JetPt_%s_MPIoff_%sScaled" % (obs, label)).Fill( + jp.pt(), (obs_dict['p'] - obs_dict['ch']) / obs_dict['p']) + + # 4D response matrices for "forward folding" to ch level + x = ([jch.pt(), jp.pt(), obs_dict['ch'], obs_dict['p']]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_p_ch_MPIoff_%sScaled' % (obs, label)).Fill(x_array) + + x = ([jh.pt(), jp.pt(), obs_dict['h'], obs_dict['p']]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_p_h_MPIoff_%sScaled' % (obs, label)).Fill(x_array) + + + #--------------------------------------------------------------- + # Fill jet histograms for MPI (which are just the H-->CH RMs) + #--------------------------------------------------------------- + def fill_jet_histograms_MPI(self, jetR, jp, jh, jch): + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + # Groomed jets + gshop_chh = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) + jet_ch_groomed_lund = self.utils.groom(gshop_chh, gs, jetR) + if not jet_ch_groomed_lund: + continue + + gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jet_h_groomed_lund = self.utils.groom(gshop_h, gs, jetR) + if not 
jet_h_groomed_lund: + continue + + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jet_p_groomed_lund = self.utils.groom(gshop_p, gs, jetR) + if not jet_p_groomed_lund: + continue + + label = "R%s_%s" % (jetR, gl) + obs_dict = None + for obs in self.observables: + if obs == "theta_g": + obs_dict = { + "p" : jet_p_groomed_lund.Delta() / jetR, + "h" : jet_h_groomed_lund.Delta() / jetR, + "ch": jet_ch_groomed_lund.Delta() / jetR } + elif obs == "zg": + if self.skip_zg: + continue + obs_dict = { + "p" : jet_p_groomed_lund.z(), + "h" : jet_h_groomed_lund.z(), + "ch": jet_ch_groomed_lund.z() } + else: + raise ValueError("Unrecognized observable " + obs) + + # 4D response matrices for "forward folding" from h to ch level + x = ([jch.pt(), jh.pt(), obs_dict['ch'], obs_dict['h']]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_h_ch_MPIon_%sScaled' % (obs, label)).Fill(x_array) + + + #--------------------------------------------------------------- + # Initiate scaling of all histograms and print final simulation info + #--------------------------------------------------------------- + def scale_print_final_info(self): + + # Scale all jet histograms by the appropriate factor from generated cross section + # and the number of accepted events + if not self.no_scale: + scale_f = self.xsec / self.hNevents.GetBinContent(1) + print("Weight MPIoff histograms by (cross section)/(N events) =", scale_f) + + MPI_scale_f = None + if self.herwig_file_MPI: + MPI_scale_f = self.xsec_MPI / self.hNeventsMPI.GetBinContent(1) + print("Weight MPIon histograms by (cross section)/(N events) =", MPI_scale_f) + + self.scale_jet_histograms(scale_f, MPI_scale_f) + print() + + self.hNevents.SetBinError(1, 0) + if self.herwig_file_MPI: + self.hNeventsMPI.SetBinError(1, 0) + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + print(("For R=%s: %i 
jets cut at first match criteria; " + \ + "%i jets cut at second match criteria.") % + (str(jetR), count1, count2)) + print() + + + #--------------------------------------------------------------- + # Scale all jet histograms by sigma/N + #--------------------------------------------------------------- + def scale_jet_histograms(self, scale_f, MPI_scale_f): + + for jetR in self.jetR_list: + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + for h in getattr(self, hist_list_name): + h.Scale(scale_f) + + hist_list_MPIon_name = "hist_list_MPIon_R%s" % str(jetR).replace('.', '') + for h in getattr(self, hist_list_MPIon_name): + h.Scale(MPI_scale_f) + + +################################################################ +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Herwig7 debug level-1 output parser', + prog=os.path.basename(__file__)) + parser.add_argument('-i', '--input-file', action='store', type=str, default='LHC.log', + help='Input .log file from Herwig7 analysis') + parser.add_argument('-m', '--input-file-mpi', action='store', type=str, default=None, + help='Input .log file with MPI on from Herwig7 analysis') + parser.add_argument('-o', '--output-dir', action='store', type=str, default='./', + help='Output directory for generated ROOT file(s)') + parser.add_argument('--tree-output-fname', default="AnalysisResults.root", type=str, + help="Filename for the (unscaled) generated particle ROOT TTree") + parser.add_argument('--no-tree', default=False, action='store_true', + help="Do not save tree of particle information, only create histograms") + parser.add_argument('--no-match-level', help="Save simulation for only one level with " + \ + "no matching. 
"Options: 'p', 'h', 'ch'", default=None, type=str) + parser.add_argument('--no-scale', help="Turn off rescaling all histograms by cross section / N", + action='store_true', default=False) + parser.add_argument('-c', '--config-file', action='store', type=str, + default='config/angularity.yaml', + help="Path of config file for observable configurations") + args = parser.parse_args() + + if args.no_match_level not in [None, 'p', 'h', 'ch']: + print("ERROR: Unrecognized type %s. Please use 'p', 'h', or 'ch'" % args.no_match_level) + exit(1) + + # If invalid configFile is given, exit + if not os.path.exists(args.config_file): + print('File \"{0}\" does not exist! Exiting!'.format(args.config_file)) + sys.exit(0) + + process = herwig_parton_hadron( + config_file=args.config_file, output_dir=args.output_dir, args=args) + process.herwig_parton_hadron(args) diff --git a/pyjetty/alice_analysis/process/user/james/process_data_jet_axis.py b/pyjetty/alice_analysis/process/user/james/process_data_jet_axis.py index 4e682d4b3..215a7fc93 100755 --- a/pyjetty/alice_analysis/process/user/james/process_data_jet_axis.py +++ b/pyjetty/alice_analysis/process/user/james/process_data_jet_axis.py @@ -70,9 +70,9 @@ def initialize_user_output_objects(self): #--------------------------------------------------------------- # This function is called once for each jet subconfiguration #--------------------------------------------------------------- - def fill_jet_histograms(self, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, - obs_label, jet_pt_ungroomed, suffix): - + def fill_jet_histograms(self, observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_ungroomed, suffix): + # Recluster with WTA (with larger jet R) jet_def_wta = fj.JetDefinition(fj.cambridge_algorithm, 2*jetR) jet_def_wta.set_recombination_scheme(fj.WTA_pt_scheme) diff --git a/pyjetty/alice_analysis/process/user/james/process_data_subjet_z.py 
b/pyjetty/alice_analysis/process/user/james/process_data_subjet_z.py index 39b92bd6c..4cc6e71b1 100755 --- a/pyjetty/alice_analysis/process/user/james/process_data_subjet_z.py +++ b/pyjetty/alice_analysis/process/user/james/process_data_subjet_z.py @@ -94,52 +94,51 @@ def initialize_user_output_objects(self): h.GetXaxis().SetTitle('p_{T,ch jet}') h.GetYaxis().SetTitle('z') setattr(self, name, h) - + #--------------------------------------------------------------- # This function is called once for each jet subconfiguration #--------------------------------------------------------------- - def fill_jet_histograms(self, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, - obs_label, jet_pt_ungroomed, suffix): - + def fill_jet_histograms(self, observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_ungroomed, suffix): + if (jetR - obs_setting) < 1e-3: return - + # For a given jet, find all inclusive subjets of a given subjet radius cs_subjet = fj.ClusterSequence(jet.constituents(), self.subjet_def[obs_setting]) subjets = fj.sorted_by_pt(cs_subjet.inclusive_jets()) - - for observable in self.observable_list: - # Fill inclusive subjets - if 'inclusive' in observable: - for subjet in subjets: - z = subjet.pt() / jet.pt() - - # If z=1, it will be default be placed in overflow bin -- prevent this - if np.isclose(z, 1.): - z = 0.999 - - getattr(self, 'h_{}_JetPt_R{}_{}{}'.format(observable, jetR, obs_setting, suffix)).Fill(jet.pt(), z) - - # Fill leading subjets - if 'leading' in observable: - leading_subjet = self.utils.leading_jet(subjets) - z_leading = leading_subjet.pt() / jet.pt() - + # Fill inclusive subjets + if 'inclusive' in observable: + for subjet in subjets: + z = subjet.pt() / jet.pt() + # If z=1, it will be default be placed in overflow bin -- prevent this - if np.isclose(z_leading, 1.): - z_leading = 0.999 - - getattr(self, 'h_{}_JetPt_R{}_{}{}'.format(observable, jetR, obs_setting, suffix)).Fill(jet.pt(), 
z_leading) - - # Fill z of subjet constituents for z~1 subjets - if z_leading > 0.99: - name = 'h_{}_zconst_R{}_{}_z099_1{}'.format(observable, jetR, obs_setting, suffix) - for p in leading_subjet.constituents(): - z = p.pt() / leading_subjet.pt() - if np.isclose(z, 1.): - z = 0.999 - getattr(self, name).Fill(jet.pt(), z) + if np.isclose(z, 1.): + z = 0.999 + + getattr(self, 'h_{}_JetPt_R{}_{}{}'.format(observable, jetR, obs_setting, suffix)).Fill(jet.pt(), z) + + # Fill leading subjets + if 'leading' in observable: + leading_subjet = self.utils.leading_jet(subjets) + z_leading = leading_subjet.pt() / jet.pt() + + # If z=1, it will be default be placed in overflow bin -- prevent this + if np.isclose(z_leading, 1.): + z_leading = 0.999 + + getattr(self, 'h_{}_JetPt_R{}_{}{}'.format(observable, jetR, obs_setting, suffix)).Fill(jet.pt(), z_leading) + + # Fill z of subjet constituents for z~1 subjets + if z_leading > 0.99: + name = 'h_{}_zconst_R{}_{}_z099_1{}'.format(observable, jetR, obs_setting, suffix) + for p in leading_subjet.constituents(): + z = p.pt() / leading_subjet.pt() + if np.isclose(z, 1.): + z = 0.999 + getattr(self, name).Fill(jet.pt(), z) + ################################################################## if __name__ == '__main__': diff --git a/pyjetty/alice_analysis/process/user/james/process_data_theta_g.py b/pyjetty/alice_analysis/process/user/james/process_data_theta_g.py index ff5e299f1..53e5a53f6 100755 --- a/pyjetty/alice_analysis/process/user/james/process_data_theta_g.py +++ b/pyjetty/alice_analysis/process/user/james/process_data_theta_g.py @@ -3,7 +3,7 @@ """ Analysis class to read a ROOT TTree of track information and do jet-finding, and save basic histograms. 
- + Author: James Mulligan (james.mulligan@berkeley.edu) """ @@ -32,7 +32,7 @@ class ProcessData_theta_g(process_data_base.ProcessDataBase): # Constructor #--------------------------------------------------------------- def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): - + # Initialize base class super(ProcessData_theta_g, self).__init__(input_file, config_file, output_dir, debug_level, **kwargs) @@ -40,64 +40,68 @@ def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, # Initialize histograms #--------------------------------------------------------------- def initialize_user_output_objects(self): - + for jetR in self.jetR_list: - for observable in self.observable_list: - if observable == 'theta_g': - - for grooming_setting in self.obs_grooming_settings[observable]: - if grooming_setting: - grooming_label = self.utils.grooming_label(grooming_setting) - if self.is_pp: - name = 'h_{}_JetPt_R{}_{}'.format(observable, jetR, grooming_label) + # LaTeX-formatted observable name + obs_name = self.obs_names[observable] + + # Get the maximum observable value for the histograms + obs_max = None + if observable == "theta_g": + obs_max = 1.0 + elif observable == "zg": + obs_max = 0.5 + else: + # No other observables are implemented in this script + raise ValueError("Observable %s not implemented" % observable) + + for grooming_setting in self.obs_grooming_settings[observable]: + if grooming_setting: + grooming_label = self.utils.grooming_label(grooming_setting) + if self.is_pp: + name = "h_%s_JetPt_R%s_%s" % (observable, jetR, grooming_label) + h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0, 1.0) + h.GetXaxis().SetTitle("#it{p}_{T}^{ch jet}") + h.GetYaxis().SetTitle(obs_name) + setattr(self, name, h) + else: + for R_max in self.max_distance: + name = "h_%s_JetPt_R%s_%s_Rmax%s" % (observable, jetR, grooming_label, R_max) h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0, 1.0) - h.GetXaxis().SetTitle('p_{T,ch jet}') - 
h.GetYaxis().SetTitle('#theta_{g,ch}') - setattr(self, name, h) - else: - for R_max in self.max_distance: - name = 'h_{}_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, grooming_label, R_max) - h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0, 1.0) - h.GetXaxis().SetTitle('p_{T,ch jet}') - h.GetYaxis().SetTitle('#theta_{g,ch}') - setattr(self, name, h) - - if observable == 'zg': - - for grooming_setting in self.obs_grooming_settings[observable]: - if grooming_setting: - grooming_label = self.utils.grooming_label(grooming_setting) - if self.is_pp: - name = 'h_{}_JetPt_R{}_{}'.format(observable, jetR, grooming_label) - h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0, 0.5) - h.GetXaxis().SetTitle('p_{T,ch jet}') - h.GetYaxis().SetTitle('z_{g,ch}') + h.GetXaxis().SetTitle("#it{p}_{T}^{ch jet}") + h.GetYaxis().SetTitle(obs_name) setattr(self, name, h) - else: - for R_max in self.max_distance: - name = 'h_{}_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, grooming_label, R_max) - h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0, 0.5) - h.GetXaxis().SetTitle('p_{T,ch jet}') - h.GetYaxis().SetTitle('z_{g,ch}') - setattr(self, name, h) + #--------------------------------------------------------------- # This function is called once for each jet subconfiguration #--------------------------------------------------------------- - def fill_jet_histograms(self, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, - obs_label, jet_pt_ungroomed, suffix): - - # Get groomed observables from Lund object - theta_g = jet_groomed_lund.Delta() / jetR - zg = jet_groomed_lund.z() - + def calculate_observable(self, observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + if observable == "theta_g": + return jet_groomed_lund.Delta() / jetR + + elif observable == "zg": + return jet_groomed_lund.z() + + # No other observables are implemented in this script + raise ValueError("Observable %s not implemented" % observable) + + + 
#--------------------------------------------------------------- + # This function is called once for each jet subconfiguration + #--------------------------------------------------------------- + def fill_jet_histograms(self, observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_ungroomed, suffix): + # Fill histograms - if grooming_setting in self.obs_grooming_settings['theta_g']: - getattr(self, 'h_theta_g_JetPt_R{}_{}{}'.format(jetR, obs_label, suffix)).Fill(jet_pt_ungroomed, theta_g) - if grooming_setting in self.obs_grooming_settings['zg']: - getattr(self, 'h_zg_JetPt_R{}_{}{}'.format(jetR, obs_label, suffix)).Fill(jet_pt_ungroomed, zg) + getattr(self, "h_%s_JetPt_R%s_%s%s" % (observable, jetR, obs_label, suffix)).Fill( + jet_pt_ungroomed, self.calculate_observable(observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed)) + ################################################################## if __name__ == '__main__': @@ -115,21 +119,21 @@ def fill_jet_histograms(self, jet, jet_groomed_lund, jetR, obs_setting, grooming type=str, metavar='outputDir', default='./TestOutput', help='Output directory for output to be written to') - + # Parse the arguments args = parser.parse_args() - + print('Configuring...') print('inputFile: \'{0}\''.format(args.inputFile)) print('configFile: \'{0}\''.format(args.configFile)) print('ouputDir: \'{0}\"'.format(args.outputDir)) print('----------------------------------------------------------------') - + # If invalid inputFile is given, exit if not os.path.exists(args.inputFile): print('File \"{0}\" does not exist! Exiting!'.format(args.inputFile)) sys.exit(0) - + # If invalid configFile is given, exit if not os.path.exists(args.configFile): print('File \"{0}\" does not exist! 
Exiting!'.format(args.configFile)) diff --git a/pyjetty/alice_analysis/process/user/james/process_mc_jet_axis.py b/pyjetty/alice_analysis/process/user/james/process_mc_jet_axis.py index e4e1f8b44..ab9e7f508 100755 --- a/pyjetty/alice_analysis/process/user/james/process_mc_jet_axis.py +++ b/pyjetty/alice_analysis/process/user/james/process_mc_jet_axis.py @@ -40,25 +40,25 @@ class ProcessMC_jet_axis(process_mc_base.ProcessMCBase): # Constructor #--------------------------------------------------------------- def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): - + # Initialize base class super(ProcessMC_jet_axis, self).__init__(input_file, config_file, output_dir, debug_level, **kwargs) - + self.observable = self.observable_list[0] #--------------------------------------------------------------- # Initialize histograms #--------------------------------------------------------------- def initialize_user_output_objects_R(self, jetR): - + for i, axes in enumerate(self.obs_settings[self.observable]): - + grooming_setting = self.obs_grooming_settings[self.observable][i] if grooming_setting: grooming_label = self.utils.grooming_label(grooming_setting) else: grooming_label = '' - + name = 'hResidual_JetPt_{}_R{}_{}{}'.format(self.observable, jetR, axes, grooming_label) h = ROOT.TH2F(name, name, 300, 0, 300, 100, -1*jetR, jetR) h.GetXaxis().SetTitle('p_{T,truth}') @@ -75,63 +75,64 @@ def initialize_user_output_objects_R(self, jetR): self.create_thn(name, title, dim, nbins, min, max) #--------------------------------------------------------------- - # This function is called once for each jet subconfiguration - # Fill 2D histogram of (pt, obs) + # Calculate the WTA jet given a 'standard' jet #--------------------------------------------------------------- - def fill_observable_histograms(self, hname, jet, jet_groomed_lund, jetR, obs_setting, - grooming_setting, obs_label, jet_pt_ungroomed): - + def get_wta_jet(self, jet, jetR): + # 
Recluster with WTA (with larger jet R) jet_def_wta = fj.JetDefinition(fj.cambridge_algorithm, 2*jetR) jet_def_wta.set_recombination_scheme(fj.WTA_pt_scheme) if self.debug_level > 3: print('WTA jet definition is:', jet_def_wta) reclusterer_wta = fjcontrib.Recluster(jet_def_wta) - jet_wta = reclusterer_wta.result(jet) + return reclusterer_wta.result(jet) + + #--------------------------------------------------------------- + # Calculate the observable given a jet + #--------------------------------------------------------------- + def calculate_observable(self, observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): - # Compute jet axis differences - jet_groomed = jet_groomed_lund.pair() if obs_setting == 'Standard_SD': - deltaR = jet.delta_R(jet_groomed) + return jet.delta_R(jet_groomed_lund.pair()) + elif obs_setting == 'Standard_WTA': - deltaR = jet.delta_R(jet_wta) + return jet.delta_R(self.get_wta_jet(jet, jetR)) + elif obs_setting == 'WTA_SD': - deltaR = jet_groomed.delta_R(jet_wta) + return jet_groomed_lund.pair().delta_R(self.get_wta_jet(jet, jetR)) + + # Should not be any other observable + raise ValueError("Observable %s not implemented" % observable) + + #--------------------------------------------------------------- + # This function is called once for each jet subconfiguration + # Fill 2D histogram of (pt, obs) + #--------------------------------------------------------------- + def fill_observable_histograms(self, observable, hname, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + deltaR = self.calculate_observable(observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed) + + getattr(self, hname.format(observable, jetR, obs_label)).Fill(jet_pt_ungroomed, deltaR) - getattr(self, hname.format(self.observable, jetR, obs_label)).Fill(jet_pt_ungroomed, deltaR) - 
#--------------------------------------------------------------- # Fill matched jet histograms #--------------------------------------------------------------- - def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, - jet_truth_groomed_lund, jet_pp_det, jetR, - obs_setting, grooming_setting, obs_label, - jet_pt_det_ungroomed, jet_pt_truth_ungroomed, R_max, suffix): + def fill_matched_jet_histograms(self, observable, jet_det, jet_det_groomed_lund, + jet_truth, jet_truth_groomed_lund, jet_pp_det, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + R_max, suffix, **kwargs): - # Recluster with WTA (with larger jet R) - jet_def_wta = fj.JetDefinition(fj.cambridge_algorithm, 2*jetR) - jet_def_wta.set_recombination_scheme(fj.WTA_pt_scheme) - if self.debug_level > 3: - print('WTA jet definition is:', jet_def_wta) - reclusterer_wta = fjcontrib.Recluster(jet_def_wta) - jet_det_wta = reclusterer_wta.result(jet_det) - jet_truth_wta = reclusterer_wta.result(jet_truth) + deltaR_det = self.calculate_observable(observable, jet_det, jet_det_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_det_ungroomed) + + deltaR_truth = self.calculate_observable(observable, jet_truth, jet_truth_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_truth_ungroomed) - # Compute jet axis differences - jet_det_groomed = jet_det_groomed_lund.pair() - jet_truth_groomed = jet_truth_groomed_lund.pair() - if obs_setting == 'Standard_SD': - deltaR_det = jet_det.delta_R(jet_det_groomed) - deltaR_truth = jet_truth.delta_R(jet_truth_groomed) - elif obs_setting == 'Standard_WTA': - deltaR_det = jet_det.delta_R(jet_det_wta) - deltaR_truth = jet_truth.delta_R(jet_truth_wta) - elif obs_setting == 'WTA_SD': - deltaR_det = jet_det_groomed.delta_R(jet_det_wta) - deltaR_truth = jet_truth_groomed.delta_R(jet_truth_wta) - # Fill response - self.fill_response(self.observable, jetR, jet_pt_det_ungroomed, 
jet_pt_truth_ungroomed, + self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, deltaR_det, deltaR_truth, obs_label, R_max, prong_match = False) ################################################################## @@ -150,10 +151,10 @@ def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, type=str, metavar='outputDir', default='./TestOutput', help='Output directory for output to be written to') - + # Parse the arguments args = parser.parse_args() - + print('Configuring...') print('inputFile: \'{0}\''.format(args.inputFile)) print('configFile: \'{0}\''.format(args.configFile)) @@ -163,7 +164,7 @@ def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, if not os.path.exists(args.inputFile): print('File \"{0}\" does not exist! Exiting!'.format(args.inputFile)) sys.exit(0) - + # If invalid configFile is given, exit if not os.path.exists(args.configFile): print('File \"{0}\" does not exist! Exiting!'.format(args.configFile)) diff --git a/pyjetty/alice_analysis/process/user/james/process_mc_subjet_z.py b/pyjetty/alice_analysis/process/user/james/process_mc_subjet_z.py index b000e3023..d3b3717f1 100755 --- a/pyjetty/alice_analysis/process/user/james/process_mc_subjet_z.py +++ b/pyjetty/alice_analysis/process/user/james/process_mc_subjet_z.py @@ -3,7 +3,7 @@ """ Analysis class to read a ROOT TTree of MC track information and do jet-finding, and save response histograms. 
- + Author: James Mulligan (james.mulligan@berkeley.edu) """ @@ -40,10 +40,10 @@ class ProcessMC_subjet_z(process_mc_base.ProcessMCBase): # Constructor #--------------------------------------------------------------- def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): - + # Initialize base class super(ProcessMC_subjet_z, self).__init__(input_file, config_file, output_dir, debug_level, **kwargs) - + # User-specific initialization self.initialize_user_config() @@ -51,12 +51,12 @@ def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, # Initialize config file into class members #--------------------------------------------------------------- def initialize_user_config(self): - + # Define subjet finders (from first observable defined) self.subjet_def = {} for subjetR in self.obs_settings[self.observable_list[0]]: self.subjet_def[subjetR] = fj.JetDefinition(fj.antikt_algorithm, subjetR) - + #--------------------------------------------------------------- # Initialize histograms #--------------------------------------------------------------- @@ -65,19 +65,19 @@ def initialize_user_output_objects_R(self, jetR): for observable in self.observable_list: for subjetR in self.obs_settings[observable]: - - obs_label = self.utils.obs_label(subjetR, None) + + obs_label = self.utils.obs_label(subjetR, None) if (jetR - subjetR) < 1e-3: continue - + # Truth histograms name = 'h_{}_JetPt_Truth_R{}_{}'.format(observable, jetR, subjetR) h = ROOT.TH2F(name, name, 20, 0, 200, 100, 0, 1.0) h.GetXaxis().SetTitle('p_{T,ch jet}') h.GetYaxis().SetTitle('z_{r}') setattr(self, name, h) - + if self.thermal_model: for R_max in self.max_distance: name = 'h_{}_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, obs_label, R_max) @@ -85,10 +85,10 @@ def initialize_user_output_objects_R(self, jetR): h.GetXaxis().SetTitle('p_{T,ch jet}') h.GetYaxis().SetTitle('#z_{r}') setattr(self, name, h) - + # Subjet matching histograms if not self.is_pp: - 
+ for R_max in self.max_distance: # Subjet matching histograms -- only need one set for inclusive/leading @@ -96,11 +96,11 @@ def initialize_user_output_objects_R(self, jetR): name = 'hDeltaR_combined_ppdet_subjet_z_R{}_{}_Rmax{}'.format(jetR, subjetR, R_max) h = ROOT.TH2F(name, name, 200, 0, 200, 100, 0., 2.) setattr(self, name, h) - + name = 'hDeltaR_ppdet_pptrue_subjet_z_R{}_{}_Rmax{}'.format(jetR, subjetR, R_max) h = ROOT.TH2F(name, name, 200, 0, 200, 100, 0., 2.) setattr(self, name, h) - + # Plot deltaR distribution between the truth-detector leading subjets # (since they are not matched geometrically, and can contain "swaps") if 'leading' in observable: @@ -110,7 +110,7 @@ def initialize_user_output_objects_R(self, jetR): h.GetYaxis().SetTitle('#it{z_{r}}') h.GetZaxis().SetTitle('#DeltaR') setattr(self, name, h) - + # Create prong matching histograms name = 'h_{}_matched_pt_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, subjetR, R_max) h = ROOT.TH3F(name, name, 20, 0, 200, 100, 0, 1.0, 10, 0., 1.) @@ -118,29 +118,29 @@ def initialize_user_output_objects_R(self, jetR): h.GetYaxis().SetTitle('#it{z_{r,det}}') h.GetZaxis().SetTitle('Matched p_{T,det} fraction') setattr(self, name, h) - + name = 'h_{}_matched_pt_deltaZ_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, subjetR, R_max) h = ROOT.TH3F(name, name, 20, 0, 200, 10, 0, 1.0, 100, -1., 1.) h.GetXaxis().SetTitle('p_{T,ch jet,truth}') h.GetYaxis().SetTitle('Matched p_{T,det} fraction') h.GetZaxis().SetTitle('#Delta#it{z_{r}}') setattr(self, name, h) - + name = 'h_{}_matched_pt_deltaR_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, subjetR, R_max) h = ROOT.TH3F(name, name, 20, 0, 200, 10, 0, 1.0, 100, 0., 1.) 
h.GetXaxis().SetTitle('p_{T,ch jet,truth}') h.GetYaxis().SetTitle('Matched p_{T,det} fraction') h.GetZaxis().SetTitle('#Delta#it{R}') setattr(self, name, h) - + else: - + # Subjet matching histograms -- only need one set for inclusive/leading if observable == self.observable_list[0]: name = 'hDeltaR_ppdet_pptrue_subjet_z_R{}_{}'.format(jetR, subjetR) h = ROOT.TH2F(name, name, 200, 0, 200, 100, 0., 2.) setattr(self, name, h) - + # Plot deltaR distribution between the truth-detector leading subjets # (since they are not matched geometrically, and can contain "swaps") if 'leading' in observable: @@ -150,7 +150,7 @@ def initialize_user_output_objects_R(self, jetR): h.GetYaxis().SetTitle('#it{z_{r}}') h.GetZaxis().SetTitle('#DeltaR') setattr(self, name, h) - + # Plot fraction of det-level subjets without a unique match, as a function of z if 'inclusive' in observable: name = 'h_match_fraction_{}_R{}_{}'.format(observable, jetR, subjetR) @@ -162,17 +162,17 @@ def initialize_user_output_objects_R(self, jetR): # Residuals and responses for subjetR in self.obs_settings[observable]: - + if (jetR - subjetR) < 1e-3: continue - + if not self.is_pp: - + for R_max in self.max_distance: self.create_response_histograms(observable, jetR, subjetR, R_max) if 'leading' in observable and R_max == self.main_R_max: self.create_response_histograms(observable, jetR, subjetR, '{}_matched'.format(R_max)) - + else: self.create_response_histograms(observable, jetR, subjetR) @@ -181,7 +181,7 @@ def initialize_user_output_objects_R(self, jetR): # Fill 2D histogram of (pt, obs) #--------------------------------------------------------------- def create_response_histograms(self, observable, jetR, subjetR, R_max = None): - + if R_max: suffix = '_Rmax{}'.format(R_max) else: @@ -195,7 +195,7 @@ def create_response_histograms(self, observable, jetR, subjetR, R_max = None): max = [150., 200., 1., 1.] 
name = 'hResponse_JetPt_{}_R{}_{}{}'.format(observable, jetR, subjetR, suffix) self.create_thn(name, title, dim, nbins, min, max) - + name = 'hResidual_JetPt_{}_R{}_{}{}'.format(observable, jetR, subjetR, suffix) h = ROOT.TH3F(name, name, 20, 0, 200, 100, 0., 1., 200, -2., 2.) h.GetXaxis().SetTitle('p_{T,truth}') @@ -207,46 +207,44 @@ def create_response_histograms(self, observable, jetR, subjetR, R_max = None): # This function is called once for each jet subconfiguration # Fill 2D histogram of (pt, obs) #--------------------------------------------------------------- - def fill_observable_histograms(self, hname, jet, jet_groomed_lund, jetR, obs_setting, - grooming_setting, obs_label, jet_pt_ungroomed): - + def fill_observable_histograms(self, observable, hname, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + if (jetR - obs_setting) < 1e-3: return - + # For a given jet, find inclusive subjets of a given subjet radius cs_subjet = fj.ClusterSequence(jet.constituents(), self.subjet_def[obs_setting]) subjets = fj.sorted_by_pt(cs_subjet.inclusive_jets()) - - for observable in self.observable_list: - - # Fill inclusive subjets - if 'inclusive' in observable: - for subjet in subjets: - z = subjet.pt() / jet.pt() - getattr(self, hname.format(observable, obs_label)).Fill(jet.pt(), z) - - # Fill leading subjets - if 'leading' in observable: - leading_subjet = self.utils.leading_jet(subjets) - z_leading = leading_subjet.pt() / jet.pt() - getattr(self, hname.format(observable, obs_label)).Fill(jet.pt(), z_leading) - + + # Fill inclusive subjets + if 'inclusive' in observable: + for subjet in subjets: + z = subjet.pt() / jet.pt() + getattr(self, hname.format(observable, obs_label)).Fill(jet.pt(), z) + + # Fill leading subjets + if 'leading' in observable: + leading_subjet = self.utils.leading_jet(subjets) + z_leading = leading_subjet.pt() / jet.pt() + getattr(self, hname.format(observable, obs_label)).Fill(jet.pt(), z_leading) + 
#--------------------------------------------------------------- # Fill matched jet histograms #--------------------------------------------------------------- - def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, - jet_truth_groomed_lund, jet_pp_det, jetR, - obs_setting, grooming_setting, obs_label, - jet_pt_det_ungroomed, jet_pt_truth_ungroomed, R_max, suffix, **kwargs): - + def fill_matched_jet_histograms(self, observable, jet_det, jet_det_groomed_lund, + jet_truth, jet_truth_groomed_lund, jet_pp_det, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + R_max, suffix, **kwargs): + if (jetR - obs_setting) < 1e-3: return - + # If jetscape, we will need to correct substructure observable for holes (pt is corrected in base class) if self.jetscape: holes_in_det_jet = kwargs['holes_in_det_jet'] holes_in_truth_jet = kwargs['holes_in_truth_jet'] - + # Find all subjets subjetR = obs_setting cs_subjet_det = fj.ClusterSequence(jet_det.constituents(), self.subjet_def[subjetR]) @@ -254,7 +252,7 @@ def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, cs_subjet_truth = fj.ClusterSequence(jet_truth.constituents(), self.subjet_def[subjetR]) subjets_truth = fj.sorted_by_pt(cs_subjet_truth.inclusive_jets()) - + if not self.is_pp: cs_subjet_det_pp = fj.ClusterSequence(jet_pp_det.constituents(), self.subjet_def[subjetR]) subjets_det_pp = fj.sorted_by_pt(cs_subjet_det_pp.inclusive_jets()) @@ -266,111 +264,106 @@ def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, # First fill the combined-to-pp matches, then the pp-to-pp matches [[self.set_matching_candidates(subjet_det_combined, subjet_det_pp, subjetR, 'hDeltaR_combined_ppdet_subjet_z_R{}_{}_Rmax{}'.format(jetR, subjetR, R_max), fill_jet1_matches_only=True) for subjet_det_pp in subjets_det_pp] for subjet_det_combined in subjets_det] [[self.set_matching_candidates(subjet_det_pp, subjet_truth, subjetR, 
'hDeltaR_ppdet_pptrue_subjet_z_R{}_{}_Rmax{}'.format(jetR, subjetR, R_max)) for subjet_truth in subjets_truth] for subjet_det_pp in subjets_det_pp] - + # Loop through subjets and set accepted matches if self.is_pp: [self.set_matches_pp(subjet_det, 'hSubjetMatchingQA_R{}_{}'.format(jetR, subjetR)) for subjet_det in subjets_det] else: [self.set_matches_AA(subjet_det_combined, subjetR, 'hSubjetMatchingQA_R{}_{}'.format(jetR, subjetR)) for subjet_det_combined in subjets_det] - # Loop through matches and fill histograms - for observable in self.observable_list: - - # Fill inclusive subjets - if 'inclusive' in observable: - - for subjet_det in subjets_det: - - z_det = subjet_det.pt() / jet_det.pt() - - # If z=1, it will be default be placed in overflow bin -- prevent this - if np.isclose(z_det, 1.): - z_det = 0.999 - - successful_match = False - - if subjet_det.has_user_info(): - subjet_truth = subjet_det.python_info().match - - if subjet_truth: - - successful_match = True - - # For subjet matching radius systematic, check distance between subjets - if self.matching_systematic: - if subjet_det.delta_R(subjet_truth) > 0.5 * self.jet_matching_distance * subjetR: - continue - - z_truth = subjet_truth.pt() / jet_truth.pt() - - # If z=1, it will be default be placed in overflow bin -- prevent this - if np.isclose(z_truth, 1.): - z_truth = 0.999 - - # In Pb-Pb case, fill matched pt fraction - if not self.is_pp: - self.fill_subjet_matched_pt_histograms(observable, - subjet_det, subjet_truth, - z_det, z_truth, - jet_truth.pt(), jetR, subjetR, R_max) - - # Fill histograms - # Note that we don't fill 'matched' histograms here, since that is only - # meaningful for leading subjets - self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, - z_det, z_truth, obs_label, R_max, prong_match=False) - - # Fill number of subjets with/without unique match, as a function of zr - if self.is_pp: - name = 'h_match_fraction_{}_R{}_{}'.format(observable, jetR, subjetR) - 
getattr(self, name).Fill(jet_truth.pt(), z_det, successful_match) - - # Get leading subjet and fill histograms - if 'leading' in observable: - - leading_subjet_det = self.utils.leading_jet(subjets_det) - leading_subjet_truth = self.utils.leading_jet(subjets_truth) - - # Note that we don't want to check whether they are geometrical matches - # We rather want to correct the measured leading subjet to the true leading subjet - if leading_subjet_det and leading_subjet_truth: - - z_leading_det = leading_subjet_det.pt() / jet_det.pt() - z_leading_truth = leading_subjet_truth.pt() / jet_truth.pt() - - # If z=1, it will be default be placed in overflow bin -- prevent this - if np.isclose(z_leading_det, 1.): - z_leading_det = 0.999 - if np.isclose(z_leading_truth, 1.): - z_leading_truth = 0.999 - - # In Pb-Pb case, fill matched pt fraction - if not self.is_pp: - match = self.fill_subjet_matched_pt_histograms(observable, - leading_subjet_det, leading_subjet_truth, + # Fill inclusive subjets + if 'inclusive' in observable: + + for subjet_det in subjets_det: + + z_det = subjet_det.pt() / jet_det.pt() + + # If z=1, it will be default be placed in overflow bin -- prevent this + if np.isclose(z_det, 1.): + z_det = 0.999 + + successful_match = False + + if subjet_det.has_user_info(): + subjet_truth = subjet_det.python_info().match + + if subjet_truth: + + successful_match = True + + # For subjet matching radius systematic, check distance between subjets + if self.matching_systematic: + if subjet_det.delta_R(subjet_truth) > 0.5 * self.jet_matching_distance * subjetR: + continue + + z_truth = subjet_truth.pt() / jet_truth.pt() + + # If z=1, it will be default be placed in overflow bin -- prevent this + if np.isclose(z_truth, 1.): + z_truth = 0.999 + + # In Pb-Pb case, fill matched pt fraction + if not self.is_pp: + self.fill_subjet_matched_pt_histograms(observable, subjet_det, + subjet_truth, z_det, z_truth, jet_truth.pt(), jetR, subjetR, R_max) + + # Fill histograms + # Note that 
we don't fill 'matched' histograms here, since that is only + # meaningful for leading subjets + self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + z_det, z_truth, obs_label, R_max, prong_match=False) + + # Fill number of subjets with/without unique match, as a function of zr + if self.is_pp: + name = 'h_match_fraction_{}_R{}_{}'.format(observable, jetR, subjetR) + getattr(self, name).Fill(jet_truth.pt(), z_det, successful_match) + + # Get leading subjet and fill histograms + if 'leading' in observable: + + leading_subjet_det = self.utils.leading_jet(subjets_det) + leading_subjet_truth = self.utils.leading_jet(subjets_truth) + + # Note that we don't want to check whether they are geometrical matches + # We rather want to correct the measured leading subjet to the true leading subjet + if leading_subjet_det and leading_subjet_truth: + + z_leading_det = leading_subjet_det.pt() / jet_det.pt() + z_leading_truth = leading_subjet_truth.pt() / jet_truth.pt() + + # If z=1, it will be default be placed in overflow bin -- prevent this + if np.isclose(z_leading_det, 1.): + z_leading_det = 0.999 + if np.isclose(z_leading_truth, 1.): + z_leading_truth = 0.999 + + # In Pb-Pb case, fill matched pt fraction + if not self.is_pp: + match = self.fill_subjet_matched_pt_histograms(observable, + leading_subjet_det, leading_subjet_truth, z_leading_det, z_leading_truth, jet_truth.pt(), jetR, subjetR, R_max) - else: - match = False - - # Fill histograms - self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, - z_leading_det, z_leading_truth, obs_label, R_max, prong_match=match) - - # Plot deltaR distribution between the detector and truth leading subjets - # (since they are not matched geometrically, the true leading may not be the measured leading - deltaR = leading_subjet_det.delta_R(leading_subjet_truth) - name = 'hDeltaR_det_truth_{}_R{}_{}'.format(observable, jetR, subjetR) - if not self.is_pp: - name += 
'_Rmax{}'.format(R_max) - getattr(self, name).Fill(jet_truth.pt(), z_leading_truth, deltaR) - + else: + match = False + + # Fill histograms + self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + z_leading_det, z_leading_truth, obs_label, R_max, prong_match=match) + + # Plot deltaR distribution between the detector and truth leading subjets + # (since they are not matched geometrically, the true leading may not be the measured leading + deltaR = leading_subjet_det.delta_R(leading_subjet_truth) + name = 'hDeltaR_det_truth_{}_R{}_{}'.format(observable, jetR, subjetR) + if not self.is_pp: + name += '_Rmax{}'.format(R_max) + getattr(self, name).Fill(jet_truth.pt(), z_leading_truth, deltaR) + #--------------------------------------------------------------- # Do prong-matching #--------------------------------------------------------------- def fill_subjet_matched_pt_histograms(self, observable, subjet_det, subjet_truth, z_det, z_truth, jet_pt_truth, jetR, subjetR, R_max): - + # Get pp det-level subjet # Inclusive case: This is matched to the combined subjet (and its pp truth-level subjet) # Leading case: This is matched only to the pp truth-level leading subjet @@ -379,11 +372,11 @@ def fill_subjet_matched_pt_histograms(self, observable, subjet_det, subjet_truth subjet_pp_det = subjet_truth.python_info().match if not subjet_pp_det: return - + matched_pt = fjtools.matched_pt(subjet_det, subjet_pp_det) name = 'h_{}_matched_pt_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, subjetR, R_max) getattr(self, name).Fill(jet_pt_truth, z_det, matched_pt) - + # Plot dz between det and truth subjets deltaZ = z_det - z_truth name = 'h_{}_matched_pt_deltaZ_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, subjetR, R_max) @@ -413,10 +406,10 @@ def fill_subjet_matched_pt_histograms(self, observable, subjet_det, subjet_truth type=str, metavar='outputDir', default='./TestOutput', help='Output directory for output to be written to') - + # Parse the arguments args = 
parser.parse_args() - + print('Configuring...') print('inputFile: \'{0}\''.format(args.inputFile)) print('configFile: \'{0}\''.format(args.configFile)) @@ -426,7 +419,7 @@ def fill_subjet_matched_pt_histograms(self, observable, subjet_det, subjet_truth if not os.path.exists(args.inputFile): print('File \"{0}\" does not exist! Exiting!'.format(args.inputFile)) sys.exit(0) - + # If invalid configFile is given, exit if not os.path.exists(args.configFile): print('File \"{0}\" does not exist! Exiting!'.format(args.configFile)) diff --git a/pyjetty/alice_analysis/process/user/james/process_mc_theta_g.py b/pyjetty/alice_analysis/process/user/james/process_mc_theta_g.py index 5d64bfe23..0c7a954e9 100755 --- a/pyjetty/alice_analysis/process/user/james/process_mc_theta_g.py +++ b/pyjetty/alice_analysis/process/user/james/process_mc_theta_g.py @@ -3,7 +3,7 @@ """ Analysis class to read a ROOT TTree of MC track information and do jet-finding, and save response histograms. - + Author: James Mulligan (james.mulligan@berkeley.edu) """ @@ -40,7 +40,7 @@ class ProcessMC_theta_g(process_mc_base.ProcessMCBase): # Constructor #--------------------------------------------------------------- def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): - + # Initialize base class super(ProcessMC_theta_g, self).__init__(input_file, config_file, output_dir, debug_level, **kwargs) @@ -48,7 +48,7 @@ def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, # Initialize histograms #--------------------------------------------------------------- def initialize_user_output_objects_R(self, jetR): - + for observable in self.observable_list: if observable == 'theta_g': @@ -56,7 +56,7 @@ def initialize_user_output_objects_R(self, jetR): for grooming_setting in self.obs_grooming_settings[observable]: if grooming_setting: grooming_label = self.utils.grooming_label(grooming_setting) - + if self.is_pp: self.create_theta_g_histograms(observable, jetR, 
grooming_label) else: @@ -64,7 +64,7 @@ def initialize_user_output_objects_R(self, jetR): self.create_theta_g_histograms(observable, jetR, grooming_label, R_max) if R_max == self.main_R_max: self.create_theta_g_histograms(observable, jetR, grooming_label, '{}_matched'.format(R_max)) - + if self.thermal_model: for R_max in self.max_distance: name = 'h_{}_JetPt_R{}_{}_Rmax{}'.format(observable, jetR, grooming_label, R_max) @@ -72,7 +72,7 @@ def initialize_user_output_objects_R(self, jetR): h.GetXaxis().SetTitle('p_{T,ch jet}') h.GetYaxis().SetTitle('#theta_{g,ch}') setattr(self, name, h) - + name = 'h_{}_JetPt_Truth_R{}_{}'.format(observable, jetR, grooming_label) h = ROOT.TH2F(name, name, 20, 0, 200, 100, 0, 1.0) h.GetXaxis().SetTitle('p_{T,ch jet}') @@ -80,11 +80,11 @@ def initialize_user_output_objects_R(self, jetR): setattr(self, name, h) if observable == 'zg': - + for grooming_setting in self.obs_grooming_settings[observable]: if grooming_setting: grooming_label = self.utils.grooming_label(grooming_setting) - + if self.is_pp: self.create_zg_histograms(observable, jetR, grooming_label) else: @@ -100,7 +100,7 @@ def initialize_user_output_objects_R(self, jetR): h.GetXaxis().SetTitle('p_{T,ch jet}') h.GetYaxis().SetTitle('z_{g,ch}') setattr(self, name, h) - + name = 'h_{}_JetPt_Truth_R{}_{}'.format(observable, jetR, grooming_label) h = ROOT.TH2F(name, name, 20, 0, 200, 50, 0, 0.5) h.GetXaxis().SetTitle('p_{T,ch jet}') @@ -123,17 +123,17 @@ def initialize_user_output_objects_R(self, jetR): grooming_label = self.utils.grooming_label(grooming_setting) if not self.is_pp: self.create_prong_matching_histograms(jetR, grooming_label) - + #--------------------------------------------------------------- # Create Lund plane histograms #--------------------------------------------------------------- def create_lund_histograms(self, jetR, grooming_label, R_max = None): - + if R_max: suffix = '_Rmax{}'.format(R_max) else: suffix = '' - + name = 'hLundPlane_R{}_{}{}'.format(jetR, 
grooming_label, suffix) h = ROOT.TH2F(name, name, 140, 0, 7, 100, -4., 6.) h.GetXaxis().SetTitle('log(1/ #DeltaR)') @@ -144,7 +144,7 @@ def create_lund_histograms(self, jetR, grooming_label, R_max = None): # Create theta_g response histograms #--------------------------------------------------------------- def create_prong_matching_histograms(self, jetR, grooming_label): - + prong_list = ['leading', 'subleading'] match_list = ['leading', 'subleading', 'ungroomed', 'outside'] @@ -158,14 +158,14 @@ def create_prong_matching_histograms(self, jetR, grooming_label): h.GetYaxis().SetTitle('Prong matching fraction') h.GetZaxis().SetTitle('#Delta R_{prong}') setattr(self, name, h) - + name = 'hProngMatching_{}_{}_JetPtDet_R{}_{}_Rmax{}'.format(prong, match, jetR, grooming_label, R_max) h = ROOT.TH3F(name, name, 20, 0, 200, 15, -0.4, 1.1, 20, 0., 2*jetR) h.GetXaxis().SetTitle('p_{T,pp-det}') h.GetYaxis().SetTitle('Prong matching fraction') h.GetZaxis().SetTitle('#Delta R_{prong}') setattr(self, name, h) - + name = 'hProngMatching_{}_{}_JetPtZ_R{}_{}_Rmax{}'.format(prong, match, jetR, grooming_label, R_max) h = ROOT.TH3F(name, name, 20, 0, 200, 15, -0.4, 1.1, 50, -0.5, 0.5) h.GetXaxis().SetTitle('p_{T,truth}') @@ -189,7 +189,7 @@ def create_theta_g_histograms(self, observable, jetR, grooming_label, R_max = No suffix = '_Rmax{}'.format(R_max) else: suffix = '' - + # Create THn of response for theta_g if self.fill_RM_histograms: dim = 4; @@ -199,7 +199,7 @@ def create_theta_g_histograms(self, observable, jetR, grooming_label, R_max = No max = [150., 200., 1., 1.] name = 'hResponse_JetPt_{}_R{}_{}{}'.format(observable, jetR, grooming_label, suffix) self.create_thn(name, title, dim, nbins, min, max) - + name = 'hResidual_JetPt_{}_R{}_{}{}'.format(observable, jetR, grooming_label, suffix) h = ROOT.TH3F(name, name, 20, 0, 200, 100, 0., 1., 200, -2., 2.) 
h.GetXaxis().SetTitle('p_{T,truth}') @@ -216,7 +216,7 @@ def create_zg_histograms(self, observable, jetR, grooming_label, R_max = None): suffix = '_Rmax{}'.format(R_max) else: suffix = '' - + # Create THn of response for z_g if self.fill_RM_histograms: dim = 4; @@ -234,71 +234,81 @@ def create_zg_histograms(self, observable, jetR, grooming_label, R_max = None): h.GetZaxis().SetTitle('#frac{z_{g,det}-z_{g,truth}}{z_{g,truth}}') setattr(self, name, h) + #--------------------------------------------------------------- + # Calculate the observable given a jet + #--------------------------------------------------------------- + def calculate_observable(self, observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + if observable == "theta_g": + return jet_groomed_lund.Delta() / jetR + + elif observable == "zg": + return jet_groomed_lund.z() + + # No other observables are implemented in this script + raise ValueError("Observable %s not implemented" % observable) #--------------------------------------------------------------- # This function is called once for each jet subconfiguration # Fill 2D histogram of (pt, obs) #--------------------------------------------------------------- - def fill_observable_histograms(self, hname, jet, jet_groomed_lund, jetR, obs_setting, - grooming_setting, obs_label, jet_pt_ungroomed): + def fill_observable_histograms(self, observable, hname, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + obs = self.calculate_observable(observable, jet, jet_groomed_lund, jetR, + obs_setting, grooming_setting, obs_label, jet_pt_ungroomed) - theta_g = jet_groomed_lund.Delta()/jetR - zg = jet_groomed_lund.z() - - getattr(self, hname.format('theta_g', obs_label)).Fill(jet_pt_ungroomed, theta_g) - getattr(self, hname.format('zg', obs_label)).Fill(jet_pt_ungroomed, zg) + getattr(self, hname.format(observable, obs_label)).Fill(jet_pt_ungroomed, obs) 
#--------------------------------------------------------------- # Fill matched jet histograms #--------------------------------------------------------------- - def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, - jet_truth_groomed_lund, jet_pp_det, jetR, - obs_setting, grooming_setting, obs_label, - jet_pt_det_ungroomed, jet_pt_truth_ungroomed, R_max, suffix, **kwargs): - + def fill_matched_jet_histograms(self, observable, jet_det, jet_det_groomed_lund, + jet_truth, jet_truth_groomed_lund, jet_pp_det, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + R_max, suffix, **kwargs): + # Compute groomed observables - theta_g_det = jet_det_groomed_lund.Delta()/jetR - theta_g_truth = jet_truth_groomed_lund.Delta()/jetR - zg_det = jet_det_groomed_lund.z() - zg_truth = jet_truth_groomed_lund.z() - - # Fill Lund diagrams - lund_coords = self.lund_coordinates(jet_truth_groomed_lund) - name = 'hLundPlane_R{}_{}{}'.format(jetR, obs_label, suffix) - if jet_pt_truth_ungroomed > 100.: - getattr(self, name).Fill(lund_coords[0], lund_coords[1]) - - # Fill prong-matching histograms + obs_det = self.calculate_observable(observable, jet_det, jet_det_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_det_ungroomed) + + obs_truth = self.calculate_observable(observable, jet_truth, jet_truth_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_truth_ungroomed) + + # Only needs to be done once for either theta_g or zg + if observable == "zg": + # Fill Lund diagrams + lund_coords = self.lund_coordinates(jet_truth_groomed_lund) + name = 'hLundPlane_R{}_{}{}'.format(jetR, obs_label, suffix) + if jet_pt_truth_ungroomed > 100.: + getattr(self, name).Fill(lund_coords[0], lund_coords[1]) + + # If PbPb, find matching prong, and fill extra RM only for successful prong matches + prong_match = False + # Fill prong-matching histograms once for either theta_g or zg + if not self.is_pp and
grooming_setting in self.obs_grooming_settings['theta_g']: prong_match = self.fill_prong_matching_histograms(jet_truth, jet_det, jet_det_groomed_lund, - jet_pt_truth_ungroomed, jetR, grooming_setting, - obs_label, R_max) - - # If PbPb, fill extra RM only for successful prong matches - if self.is_pp: - prong_match = False + jet_pt_truth_ungroomed, jetR, grooming_setting, obs_label, R_max, fill=(observable == "zg")) # Fill histograms - observable = 'theta_g' - self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, theta_g_det, theta_g_truth, obs_label, R_max, prong_match = prong_match) - - observable = 'zg' - self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, zg_det, zg_truth, obs_label, R_max, prong_match = prong_match) - + self.fill_response(observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + obs_det, obs_truth, obs_label, R_max, prong_match = prong_match) + #--------------------------------------------------------------- # Do prong-matching #--------------------------------------------------------------- - def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lund, jet_pt_truth_ungroomed, - jetR, grooming_setting, grooming_label, R_max): - + def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lund, + jet_pt_truth_ungroomed, jetR, grooming_setting, grooming_label, R_max, fill=True): + # Do grooming on pp-det jet, and get prongs jet_pp_det = jet_truth.python_info().match - + gshop = fjcontrib.GroomerShop(jet_pp_det, jetR, fj.cambridge_algorithm) jet_pp_det_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) if not jet_pp_det_groomed_lund: return - + # Groomer shop returns a fjcontrib::LundGenerator # The prongs can be retrieved directly from this object. 
# If the object exists, then it has passed grooming @@ -308,15 +318,15 @@ def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lun # Get prongs of combined jet jet_combined_prong1 = jet_det_groomed_lund.harder() jet_combined_prong2 = jet_det_groomed_lund.softer() - + # Get the fastjet::PseudoJets from the fjcontrib::LundGenerators jet_pp_det_groomed = jet_pp_det_groomed_lund.pair() jet_det_groomed = jet_det_groomed_lund.pair() - + if self.debug_level > 1: if jet_pt_truth_ungroomed > 80.: - + print('=======================================================') print('jet_pt_truth_ungroomed: {}'.format(jet_pt_truth_ungroomed)) print('jet_pt_pp_det_ungroomed: {}'.format(jet_pp_det.pt())) @@ -348,12 +358,12 @@ def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lun # (2) Fraction of pt matched: subleading pp-det in leading combined matched_pt_subleading_leading = fjtools.matched_pt(jet_combined_prong1, jet_pp_det_prong2) - + # (3) Fraction of pt matched: subleading pp-det in ungroomed combined jet matched_pt_subleading_groomed = fjtools.matched_pt(jet_det_groomed, jet_pp_det_prong2) matched_pt_subleading_ungroomed = fjtools.matched_pt(jet_det, jet_pp_det_prong2) matched_pt_subleading_ungroomed_notgroomed = matched_pt_subleading_ungroomed - matched_pt_subleading_groomed - + # (4) Fraction of pt matched: subleading pp-det not in ungroomed combined jet matched_pt_subleading_outside = 1 - matched_pt_subleading_ungroomed @@ -361,7 +371,7 @@ def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lun # -------------------------- # (1) Fraction of pt matched: leading pp-det in subleading combined matched_pt_leading_subleading = fjtools.matched_pt(jet_combined_prong2, jet_pp_det_prong1) - + # (2) Fraction of pt matched: leading pp-det in leading combined matched_pt_leading_leading = fjtools.matched_pt(jet_combined_prong1, jet_pp_det_prong1) @@ -369,7 +379,7 @@ def fill_prong_matching_histograms(self, jet_truth, 
jet_det, jet_det_groomed_lun matched_pt_leading_groomed = fjtools.matched_pt(jet_det_groomed, jet_pp_det_prong1) matched_pt_leading_ungroomed = fjtools.matched_pt(jet_det, jet_pp_det_prong1) matched_pt_leading_ungroomed_notgroomed = matched_pt_leading_ungroomed - matched_pt_leading_groomed - + # (4) Fraction of pt matched: leading pp-det not in ungroomed combined jet matched_pt_leading_outside = 1 - matched_pt_leading_ungroomed @@ -380,9 +390,9 @@ def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lun deltaZ = jet_det_groomed_lund.z() - jet_pp_det_groomed_lund.z() if self.debug_level > 1: - + if jet_pt_truth_ungroomed > 80.: - + print('subleading prong tracks -- combined: {}'.format([track.user_index() for track in jet_combined_prong2.constituents()])) print('subleading prong tracks -- pp-det: {}'.format([track.user_index() for track in jet_pp_det_prong2.constituents()])) print('leading prong tracks -- combined: {}'.format([track.user_index() for track in jet_combined_prong1.constituents()])) @@ -404,48 +414,81 @@ def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lun print('deltaR_prong2: {}'.format(deltaR_prong2)) elif jet_pp_det_groomed.has_constituents(): # pp-det passed grooming, but combined jet failed grooming - matched_pt_leading_leading = matched_pt_leading_subleading = matched_pt_leading_ungroomed_notgroomed = matched_pt_leading_outside = matched_pt_subleading_leading = matched_pt_subleading_subleading = matched_pt_subleading_ungroomed_notgroomed = matched_pt_subleading_outside = -0.1 - + matched_pt_leading_leading = matched_pt_leading_subleading = matched_pt_leading_ungroomed_notgroomed = \ + matched_pt_leading_outside = matched_pt_subleading_leading = matched_pt_subleading_subleading = \ + matched_pt_subleading_ungroomed_notgroomed = matched_pt_subleading_outside = -0.1 + elif jet_det_groomed.has_constituents(): # combined jet passed grooming, but pp-det failed grooming - matched_pt_leading_leading = 
matched_pt_leading_subleading = matched_pt_leading_ungroomed_notgroomed = matched_pt_leading_outside = matched_pt_subleading_leading = matched_pt_subleading_subleading = matched_pt_subleading_ungroomed_notgroomed = matched_pt_subleading_outside = -0.2 - + matched_pt_leading_leading = matched_pt_leading_subleading = matched_pt_leading_ungroomed_notgroomed = \ + matched_pt_leading_outside = matched_pt_subleading_leading = matched_pt_subleading_subleading = \ + matched_pt_subleading_ungroomed_notgroomed = matched_pt_subleading_outside = -0.2 + else: # both pp-det and combined jet failed SoftDrop - matched_pt_leading_leading = matched_pt_leading_subleading = matched_pt_leading_ungroomed_notgroomed = matched_pt_leading_outside = matched_pt_subleading_leading = matched_pt_subleading_subleading = matched_pt_subleading_ungroomed_notgroomed = matched_pt_subleading_outside = -0.3 - - # Leading prong - getattr(self, 'hProngMatching_leading_leading_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_leading, deltaR_prong1) - getattr(self, 'hProngMatching_leading_subleading_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_subleading, deltaR_prong1) - getattr(self, 'hProngMatching_leading_ungroomed_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_ungroomed_notgroomed, deltaR_prong1) - getattr(self, 'hProngMatching_leading_outside_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_outside, deltaR_prong1) - - getattr(self, 'hProngMatching_leading_leading_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_leading_leading, deltaR_prong1) - getattr(self, 'hProngMatching_leading_subleading_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_leading_subleading, deltaR_prong1) - 
getattr(self, 'hProngMatching_leading_ungroomed_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_leading_ungroomed_notgroomed, deltaR_prong1) - getattr(self, 'hProngMatching_leading_outside_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_leading_outside, deltaR_prong1) - - getattr(self, 'hProngMatching_leading_leading_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_leading, deltaZ) - getattr(self, 'hProngMatching_leading_subleading_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_subleading, deltaZ) - getattr(self, 'hProngMatching_leading_ungroomed_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_ungroomed_notgroomed, deltaZ) - getattr(self, 'hProngMatching_leading_outside_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_leading_outside, deltaZ) - - # Subleading prong - getattr(self, 'hProngMatching_subleading_leading_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_leading, deltaR_prong2) - getattr(self, 'hProngMatching_subleading_subleading_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_subleading, deltaR_prong2) - getattr(self, 'hProngMatching_subleading_ungroomed_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_ungroomed_notgroomed, deltaR_prong2) - getattr(self, 'hProngMatching_subleading_outside_JetPt_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_outside, deltaR_prong2) - - getattr(self, 'hProngMatching_subleading_leading_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), 
matched_pt_subleading_leading, deltaR_prong2) - getattr(self, 'hProngMatching_subleading_subleading_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_subleading_subleading, deltaR_prong2) - getattr(self, 'hProngMatching_subleading_ungroomed_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_subleading_ungroomed_notgroomed, deltaR_prong2) - getattr(self, 'hProngMatching_subleading_outside_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_subleading_outside, deltaR_prong2) - - getattr(self, 'hProngMatching_subleading_leading_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_leading, deltaZ) - getattr(self, 'hProngMatching_subleading_subleading_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_subleading, deltaZ) - getattr(self, 'hProngMatching_subleading_ungroomed_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_ungroomed_notgroomed, deltaZ) - getattr(self, 'hProngMatching_subleading_outside_JetPtZ_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pt_truth_ungroomed, matched_pt_subleading_outside, deltaZ) - - # Plot correlation of matched pt fraction for leading-subleading and subleading-leading - getattr(self, 'hProngMatching_subleading-leading_correlation_JetPtDet_R{}_{}_Rmax{}'.format(jetR, grooming_label, R_max)).Fill(jet_pp_det.pt(), matched_pt_leading_subleading, matched_pt_subleading_leading) + matched_pt_leading_leading = matched_pt_leading_subleading = matched_pt_leading_ungroomed_notgroomed = \ + matched_pt_leading_outside = matched_pt_subleading_leading = matched_pt_subleading_subleading = \ + matched_pt_subleading_ungroomed_notgroomed = matched_pt_subleading_outside = -0.3 + + if fill: + # Leading prong + getattr(self, 
'hProngMatching_leading_leading_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_leading, deltaR_prong1) + getattr(self, 'hProngMatching_leading_subleading_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_subleading, deltaR_prong1) + getattr(self, 'hProngMatching_leading_ungroomed_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_ungroomed_notgroomed, deltaR_prong1) + getattr(self, 'hProngMatching_leading_outside_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_outside, deltaR_prong1) + + getattr(self, 'hProngMatching_leading_leading_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_leading_leading, deltaR_prong1) + getattr(self, 'hProngMatching_leading_subleading_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_leading_subleading, deltaR_prong1) + getattr(self, 'hProngMatching_leading_ungroomed_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_leading_ungroomed_notgroomed, deltaR_prong1) + getattr(self, 'hProngMatching_leading_outside_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_leading_outside, deltaR_prong1) + + getattr(self, 'hProngMatching_leading_leading_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_leading, deltaZ) + getattr(self, 'hProngMatching_leading_subleading_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_subleading, deltaZ) + getattr(self, 'hProngMatching_leading_ungroomed_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_ungroomed_notgroomed, deltaZ) + getattr(self, 
'hProngMatching_leading_outside_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_leading_outside, deltaZ) + + # Subleading prong + getattr(self, 'hProngMatching_subleading_leading_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_subleading_leading, deltaR_prong2) + getattr(self, 'hProngMatching_subleading_subleading_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_subleading_subleading, deltaR_prong2) + getattr(self, 'hProngMatching_subleading_ungroomed_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_subleading_ungroomed_notgroomed, deltaR_prong2) + getattr(self, 'hProngMatching_subleading_outside_JetPt_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_subleading_outside, deltaR_prong2) + + getattr(self, 'hProngMatching_subleading_leading_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_subleading_leading, deltaR_prong2) + getattr(self, 'hProngMatching_subleading_subleading_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_subleading_subleading, deltaR_prong2) + getattr(self, 'hProngMatching_subleading_ungroomed_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_subleading_ungroomed_notgroomed, deltaR_prong2) + getattr(self, 'hProngMatching_subleading_outside_JetPtDet_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_subleading_outside, deltaR_prong2) + + getattr(self, 'hProngMatching_subleading_leading_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_subleading_leading, deltaZ) + getattr(self, 'hProngMatching_subleading_subleading_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, 
matched_pt_subleading_subleading, deltaZ) + getattr(self, 'hProngMatching_subleading_ungroomed_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_subleading_ungroomed_notgroomed, deltaZ) + getattr(self, 'hProngMatching_subleading_outside_JetPtZ_R%s_%s_Rmax%s' % (jetR, grooming_label, R_max)).Fill( + jet_pt_truth_ungroomed, matched_pt_subleading_outside, deltaZ) + + # Plot correlation of matched pt fraction for leading-subleading and subleading-leading + getattr(self, 'hProngMatching_subleading-leading_correlation_JetPtDet_R%s_%s_Rmax%s' % \ + (jetR, grooming_label, R_max)).Fill( + jet_pp_det.pt(), matched_pt_leading_subleading, matched_pt_subleading_leading) subleading_match = (matched_pt_subleading_subleading > 0.5) leading_match = (matched_pt_leading_leading > 0.5) @@ -468,10 +511,10 @@ def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lun type=str, metavar='outputDir', default='./TestOutput', help='Output directory for output to be written to') - + # Parse the arguments args = parser.parse_args() - + print('Configuring...') print('inputFile: \'{0}\''.format(args.inputFile)) print('configFile: \'{0}\''.format(args.configFile)) @@ -481,7 +524,7 @@ def fill_prong_matching_histograms(self, jet_truth, jet_det, jet_det_groomed_lun if not os.path.exists(args.inputFile): print('File \"{0}\" does not exist! Exiting!'.format(args.inputFile)) sys.exit(0) - + # If invalid configFile is given, exit if not os.path.exists(args.configFile): print('File \"{0}\" does not exist! 
Exiting!'.format(args.configFile)) diff --git a/pyjetty/alice_analysis/process/user/james/pythia_parton_hadron.py b/pyjetty/alice_analysis/process/user/james/pythia_parton_hadron.py new file mode 100755 index 000000000..d686e1bbc --- /dev/null +++ b/pyjetty/alice_analysis/process/user/james/pythia_parton_hadron.py @@ -0,0 +1,909 @@ +#!/usr/bin/env python + +from __future__ import print_function + +import fastjet as fj +import fjcontrib +import fjext + +import ROOT + +import tqdm +import yaml +import copy +import argparse +import os + +from pyjetty.mputils import * + +from heppy.pythiautils import configuration as pyconf +import pythia8 +import pythiafjext +import pythiaext + +from pyjetty.alice_analysis.process.base import process_base + +from array import array +import numpy as np + +# Prevent ROOT from stealing focus when plotting +ROOT.gROOT.SetBatch(True) +# Automatically set Sumw2 when creating new histograms +ROOT.TH1.SetDefaultSumw2() + +################################################################ +class pythia_parton_hadron(process_base.ProcessBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', + debug_level=0, args=None, **kwargs): + + super(pythia_parton_hadron, self).__init__( + input_file, config_file, output_dir, debug_level, **kwargs) + + self.initialize_config(args) + + #--------------------------------------------------------------- + # Main processing function + #--------------------------------------------------------------- + def pythia_parton_hadron(self, args): + + # Create ROOT TTree file for storing raw PYTHIA particle information + outf_path = os.path.join(self.output_dir, args.tree_output_fname) + outf = ROOT.TFile(outf_path, 'recreate') + outf.cd() + + # Initialize response histograms + self.initialize_hist() + + pinfo('user seed for pythia', self.user_seed) + # 
mycfg = ['PhaseSpace:pThatMin = 100'] + mycfg = ['Random:setSeed=on', 'Random:seed={}'.format(self.user_seed)] + mycfg.append('HadronLevel:all=off') + + # PYTHIA instance with MPI off + setattr(args, "py_noMPI", True) + pythia = pyconf.create_and_init_pythia_from_args(args, mycfg) + + # print the banner first + fj.ClusterSequence.print_banner() + print() + + self.init_jet_tools() + self.calculate_events(pythia) + pythia.stat() + print() + + # PYTHIA instance with MPI on + setattr(args, "py_noMPI", False) + pythia_MPI = pyconf.create_and_init_pythia_from_args(args, mycfg) + self.calculate_events(pythia_MPI, MPIon=True) + print() + + if not self.no_tree: + for jetR in self.jetR_list: + getattr(self, "tw_R%s" % str(jetR)).fill_tree() + + self.scale_print_final_info(pythia, pythia_MPI) + + outf.Write() + outf.Close() + + self.save_output_objects() + + #--------------------------------------------------------------- + # Initialize config file into class members + #--------------------------------------------------------------- + def initialize_config(self, args): + + # Call base class initialization + process_base.ProcessBase.initialize_config(self) + + # Read config file + with open(self.config_file, 'r') as stream: + config = yaml.safe_load(stream) + + if not os.path.exists(self.output_dir): + os.makedirs(self.output_dir) + + # Defaults to None if not in use + self.level = args.no_match_level + + self.jetR_list = config["jetR"] + + self.user_seed = args.user_seed + self.nev = args.nev + + # Formatted LaTeX names for plotting + self.obs_names = ["#it{#theta}_{g}", "#it{z}_{g}"] + self.observables = config['process_observables'] + self.obs_settings = {} + self.obs_grooming_settings = {} + for observable in self.observables: + obs_config_dict = config[observable] + obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + 
self.obs_settings[observable] = self.utils.obs_settings( + observable, obs_config_dict, obs_subconfig_list) + self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) + + # Construct set of unique grooming settings + self.grooming_settings = [] + lists_grooming = [self.obs_grooming_settings[obs] for obs in self.observables] + for observable in lists_grooming: + for setting in observable: + if setting not in self.grooming_settings and setting != None: + self.grooming_settings.append(setting) + self.grooming_labels = [self.utils.grooming_label(gs) for gs in self.grooming_settings] + + # Observable binnings for theta_g and zg + self.obs_bins_theta_g = array('d', np.arange(0, 1.01, 0.01)) + self.obs_bins_zg = array('d', np.arange(0, 0.505, 0.005)) + + # We are not reporting zg theory so save time/memory by skipping these histograms + self.skip_zg = True + if self.skip_zg: + self.obs_names = self.obs_names[:-1] + self.observables = [self.observables[i] for i in range(len(self.observables)) + if self.observables[i] != 'zg'] + + # Manually added binnings for RM and scaling histograms + if 'theory_pt_bins' in config: + self.pt_bins = array('d', config['theory_pt_bins']) + + # hadron level - ALICE tracking restriction + self.max_eta_hadron = 0.9 + + # Whether or not to rescale final jet histograms based on sigma/N + self.no_scale = args.no_scale + + # Whether or not to save particle info in raw tree structure + self.no_tree = args.no_tree + + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_hist(self): + + self.hNevents = ROOT.TH1I("hNevents", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + self.hNeventsMPI = ROOT.TH1I("hNeventsMPI", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + + for jetR in self.jetR_list: + + # Store a list of all the histograms just so that we can rescale them later + hist_list_name = 
"hist_list_R%s" % str(jetR) + setattr(self, hist_list_name, []) + hist_list_name_MPIon = "hist_list_MPIon_R%s" % str(jetR) + setattr(self, hist_list_name_MPIon, []) + + R_label = str(jetR) + 'Scaled' + + if self.level in [None, 'ch']: + name = 'hJetPt_ch_R%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{ch jet};#frac{dN}{dp_{T}^{ch jet}};', 300, 0, 300) + h.Sumw2() # enables calculation of errors + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hNconstit_Pt_ch_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{ch jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level in [None, 'h']: + name = 'hJetPt_h_R%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{jet, h};#frac{dN}{dp_{T}^{jet, h}};', 300, 0, 300) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hNconstit_Pt_h_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{h jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{h jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level in [None, 'p']: + name = 'hJetPt_p_R%s' % R_label + h = ROOT.TH1F(name, name+';p_{T}^{jet, parton};#frac{dN}{dp_{T}^{jet, parton}};', + 300, 0, 300) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hNconstit_Pt_p_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 50, 0.5, 50.5) + h.GetXaxis().SetTitle('#it{p}_{T}^{p jet}') + h.GetYaxis().SetTitle('#it{N}_{constit}^{p jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level == None: + name = 'hJetPtRes_R%s' % R_label + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 1.) 
+ h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') + h.GetYaxis().SetTitle( + '#frac{#it{p}_{T}^{parton jet}-#it{p}_{T}^{ch jet}}{#it{p}_{T}^{parton jet}}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'hResponse_JetPt_R%s' % R_label + h = ROOT.TH2F(name, name, 200, 0, 200, 200, 0, 200) + h.GetXaxis().SetTitle('#it{p}_{T}^{parton jet}') + h.GetYaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + for i_obs, obs in enumerate(self.observables): + obs_bins = getattr(self, "obs_bins_%s" % obs) + obs_name = self.obs_names[i_obs] + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + label = "R%s_%s" % (str(jetR), gl) + + if self.level in [None, 'ch']: + name = 'h_JetPt_%s_ch_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{ch jet}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{ch}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = 'h_JetPt_%s_ch_MPIon_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{ch jet}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{ch}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name_MPIon).append(h) + + if self.level in [None, 'h']: + name = 'h_JetPt_%s_h_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) + h.GetXaxis().SetTitle('p_{T}^{jet, h}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{h}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level in [None, 'p']: + name = 'h_JetPt_%s_p_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(obs_bins)-1, obs_bins) 
+ h.GetXaxis().SetTitle('p_{T}^{jet, parton}') + h.GetYaxis().SetTitle('#frac{dN}{d%s^{parton}}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + if self.level == None: + name = 'hResponse_%s_p_ch_MPIoff_%sScaled' % (obs, label) + h = ROOT.TH2F(name, name, 100, 0, 1, 100, 0, 1) + h.GetXaxis().SetTitle('%s^{parton}' % obs_name) + h.GetYaxis().SetTitle('%s^{ch}' % obs_name) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = "hResidual_JetPt_%s_MPIoff_%sScaled" % (obs, label) + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -3., 1.) + h.GetXaxis().SetTitle('p_{T}^{p jet}') + h.GetYaxis().SetTitle('#frac{%s^{p}-%s^{ch}}{%s^{p}}' % (obs_name, obs_name, obs_name)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + name = "hDiff_JetPt_%s_MPIoff_%sScaled" % (obs, label) + h = ROOT.TH2F(name, name, 300, 0, 300, 200, -2., 2.) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('%s^{p}-%s^{ch}' % (obs_name, obs_name)) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Create THn of response + dim = 4 + title = ['p_{T}^{ch jet}', 'p_{T}^{parton jet}', + obs_name + '^{ch}', obs_name + '^{parton}'] + nbins = [len(self.pt_bins)-1, len(self.pt_bins)-1, + len(obs_bins)-1, len(obs_bins)-1] + min_li = [self.pt_bins[0], self.pt_bins[0], + obs_bins[0], obs_bins[0] ] + max_li = [self.pt_bins[-1], self.pt_bins[-1], + obs_bins[-1], obs_bins[-1] ] + + name = 'hResponse_JetPt_%s_p_ch_MPIoff_%sScaled' % (obs, label) + nbins = (nbins) + xmin = (min_li) + xmax = (max_li) + nbins_array = array('i', nbins) + xmin_array = array('d', xmin) + xmax_array = array('d', xmax) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + 
setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Another set of THn for full hadron folding + title = ['p_{T}^{h jet}', 'p_{T}^{parton jet}', + obs_name + '^{h}', obs_name + '^{parton}'] + + name = 'hResponse_JetPt_%s_p_h_MPIoff_%sScaled' % (obs, label) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name).append(h) + + # Finally, a set of THn for folding H --> CH (with MPI on) + title = ['p_{T}^{ch jet}', 'p_{T}^{h jet}', + obs_name + '^{ch}', obs_name + '^{h}'] + + name = 'hResponse_JetPt_%s_h_ch_MPIon_%sScaled' % (obs, label) + h = ROOT.THnF(name, name, dim, nbins_array, xmin_array, xmax_array) + for i in range(0, dim): + h.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + h.SetBinEdges(i, self.pt_bins) + else: # i == 2 or i == 3 + h.SetBinEdges(i, obs_bins) + h.Sumw2() + setattr(self, name, h) + getattr(self, hist_list_name_MPIon).append(h) + + + #--------------------------------------------------------------- + # Initiate jet defs, selectors, and sd (if required) + #--------------------------------------------------------------- + def init_jet_tools(self): + + for jetR in self.jetR_list: + jetR_str = str(jetR) + + if not self.no_tree: + # Initialize tree writer + name = 'particle_unscaled_R%s' % jetR_str + t = ROOT.TTree(name, name) + setattr(self, "t_R%s" % jetR_str, t) + tw = RTreeWriter(tree=t) + setattr(self, "tw_R%s" % jetR_str, tw) + + # set up our jet definition and a jet selector + jet_def = fj.JetDefinition(fj.antikt_algorithm, jetR) + setattr(self, "jet_def_R%s" % jetR_str, jet_def) + print(jet_def) + + pwarning('max eta for particles after hadronization set to', self.max_eta_hadron) + parts_selector_h = fj.SelectorAbsEtaMax(self.max_eta_hadron) + + for jetR in 
self.jetR_list: + jetR_str = str(jetR) + + jet_selector = fj.SelectorPtMin(5.0) & \ + fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) + setattr(self, "jet_selector_R%s" % jetR_str, jet_selector) + + #max_eta_parton = self.max_eta_hadron + 2. * jetR + #setattr(self, "max_eta_parton_R%s" % jetR_str, max_eta_parton) + #pwarning("Max eta for partons with jet R =", jetR, "set to", max_eta_parton) + #parts_selector_p = fj.SelectorAbsEtaMax(max_eta_parton) + #setattr(self, "parts_selector_p_R%s" % jetR_str, parts_selector_p) + + count1 = 0 # Number of jets rejected from ch-h matching + setattr(self, "count1_R%s" % jetR_str, count1) + count2 = 0 # Number of jets rejected from h-p matching + setattr(self, "count2_R%s" % jetR_str, count2) + + + #--------------------------------------------------------------- + # Calculate events and pass information on to jet finding + #--------------------------------------------------------------- + def calculate_events(self, pythia, MPIon=False): + + iev = 0 # Event loop count + + if MPIon: + hNevents = self.hNeventsMPI + else: + hNevents = self.hNevents + + while hNevents.GetBinContent(1) < self.nev: + if not pythia.next(): + continue + + parts_pythia_p = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + + hstatus = pythia.forceHadronLevel() + if not hstatus: + #pwarning('forceHadronLevel false event', iev) + continue + #parts_pythia_h = pythiafjext.vectorize_select( + # pythia, [pythiafjext.kHadron, pythiafjext.kCharged]) + parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + + parts_pythia_hch = pythiafjext.vectorize_select( + pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) + + """ TODO: fix for multiple jet R + parts_pythia_p_selected = parts_selector_p(parts_pythia_p) + parts_pythia_h_selected = parts_selector_h(parts_pythia_h) + parts_pythia_hch_selected = parts_selector_h(parts_pythia_hch) + + if self.debug_level > 1: + pinfo('debug partons...') + for p in 
parts_pythia_p_selected: + pyp = pythiafjext.getPythia8Particle(p) + print(pyp.name()) + pinfo('debug hadrons...') + for p in parts_pythia_h_selected: + pyp = pythiafjext.getPythia8Particle(p) + print(pyp.name()) + pinfo('debug ch. hadrons...') + for p in parts_pythia_hch_selected: + pyp = pythiafjext.getPythia8Particle(p) + print(pyp.name()) + """ + + # Some "accepted" events don't survive hadronization step -- keep track here + hNevents.Fill(0) + self.find_jets_fill_trees(parts_pythia_p, parts_pythia_h, parts_pythia_hch, iev, MPIon) + + iev += 1 + + + #--------------------------------------------------------------- + # Find jets, do matching between levels, and fill histograms & trees + #--------------------------------------------------------------- + def find_jets_fill_trees(self, parts_pythia_p, parts_pythia_h, parts_pythia_hch, + iev, MPIon=False): + + for jetR in self.jetR_list: + jetR_str = str(jetR) + jet_selector = getattr(self, "jet_selector_R%s" % jetR_str) + jet_def = getattr(self, "jet_def_R%s" % jetR_str) + t = None; tw = None; + if not self.no_tree: + t = getattr(self, "t_R%s" % jetR_str) + tw = getattr(self, "tw_R%s" % jetR_str) + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + + # parts = pythiafjext.vectorize(pythia, True, -1, 1, False) + jets_p = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_p))) + jets_h = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_h))) + jets_ch = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_hch))) + + if MPIon: + for jet in jets_ch: + self.fill_MPI_histograms(jetR, jet) + + if self.level and not MPIon: # Only save info at one level w/o matching + if not self.no_tree: + jets = locals()["jets_%s" % self.level] + for jet in jets: + self.fill_unmatched_jet_tree(tw, jetR, iev, jet) + continue + + for i,jchh in enumerate(jets_ch): + + # match hadron (full) jet + drhh_list = [] + for j, jh in enumerate(jets_h): + drhh = jchh.delta_R(jh) + if drhh < jetR / 2.: + 
drhh_list.append((j,jh)) + if len(drhh_list) != 1: + count1 += 1 + else: # Require unique match + j, jh = drhh_list[0] + + # match parton level jet + dr_list = [] + for k, jp in enumerate(jets_p): + dr = jh.delta_R(jp) + if dr < jetR / 2.: + dr_list.append((k, jp)) + if len(dr_list) != 1: + count2 += 1 + else: + k, jp = dr_list[0] + + if self.debug_level > 0: + pwarning('event', iev) + pinfo('matched jets: ch.h:', jchh.pt(), 'h:', jh.pt(), + 'p:', jp.pt(), 'dr:', dr) + + if not MPIon: + self.fill_jet_histograms(jetR, jp, jh, jchh) + if not self.no_tree: + self.fill_matched_jet_tree(tw, jetR, iev, jp, jh, jchh) + else: + self.fill_jet_histograms_MPI(jetR, jp, jh, jchh) + + #print(" |-> SD jet params z={0:10.3f} dR={1:10.3f} mu={2:10.3f}".format( + # sd_info.z, sd_info.dR, sd_info.mu)) + + if MPIon: + setattr(self, "count1_R%s_MPIon" % jetR_str, count1) + setattr(self, "count2_R%s_MPIon" % jetR_str, count2) + else: + setattr(self, "count1_R%s" % jetR_str, count1) + setattr(self, "count2_R%s" % jetR_str, count2) + + + #--------------------------------------------------------------- + # Fill jet tree with (unscaled/raw) matched parton/hadron tracks + #--------------------------------------------------------------- + def fill_matched_jet_tree(self, tw, jetR, iev, jp, jh, jchh): + + tw.fill_branch('iev', iev) + tw.fill_branch('ch', jchh) + tw.fill_branch('h', jh) + tw.fill_branch('p', jp) + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + # Groomed jets + gshop_chh = fjcontrib.GroomerShop(jchh, jetR, self.reclustering_algorithm) + jet_ch_groomed_lund = self.utils.groom(gshop_chh, gs, jetR) + if not jet_ch_groomed_lund: + continue + + gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jet_h_groomed_lund = self.utils.groom(gshop_h, gs, jetR) + if not jet_h_groomed_lund: + continue + + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jet_p_groomed_lund = self.utils.groom(gshop_p, gs, jetR) + 
if not jet_p_groomed_lund: + continue + + obs_dict = None + for obs in self.observables: + if obs == "theta_g": + obs_dict = { + "p" : jet_p_groomed_lund.Delta() / jetR, + "h" : jet_h_groomed_lund.Delta() / jetR, + "ch": jet_ch_groomed_lund.Delta() / jetR } + elif obs == "zg": + if self.skip_zg: + continue + obs_dict = { + "p" : jet_p_groomed_lund.z(), + "h" : jet_h_groomed_lund.z(), + "ch": jet_ch_groomed_lund.z() } + else: + raise ValueError("Unrecognized observable " + obs) + + for level in ["p", "h", "ch"]: + tw.fill_branch("%s_%s_%s" % (obs, level, gl), obs_dict[level]) + + + #--------------------------------------------------------------- + # Fill jet tree with (unscaled/raw) unmatched parton/hadron tracks + #--------------------------------------------------------------- + def fill_unmatched_jet_tree(self, tw, jetR, iev, jet): + + tw.fill_branch('iev', iev) + tw.fill_branch(self.level, jet) + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + # Groomed jet + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, gs, jetR) + if not jet_groomed_lund: + continue + + obs_val = None + for obs in self.observables: + if obs == "theta_g": + obs_val = jet_groomed_lund.Delta() / jetR + elif obs == "zg": + if self.skip_zg: + continue + obs_val = jet_groomed_lund.z() + + tw.fill_branch("%s_%s_%s" % (obs, self.level, gl), obs_val) + + + #--------------------------------------------------------------- + # Fill jet histograms for MPI-on PYTHIA run-through + #--------------------------------------------------------------- + def fill_MPI_histograms(self, jetR, jet): + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + label = "R" + str(jetR) + '_' + gl + + # Groomed jet + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, gs, jetR) + if not jet_groomed_lund: + continue + + obs_val = 
None + for obs in self.observables: + if obs == "theta_g": + obs_val = jet_groomed_lund.Delta() / jetR + elif obs == "zg": + if self.skip_zg: + continue + obs_val = jet_groomed_lund.z() + else: + raise ValueError("Unrecognized observable " + obs) + + getattr(self, 'h_JetPt_%s_ch_MPIon_%sScaled' % (obs, label)).Fill(jet.pt(), obs_val) + + + #--------------------------------------------------------------- + # Fill jet histograms + #--------------------------------------------------------------- + def fill_jet_histograms(self, jetR, jp, jh, jch): + + R_label = str(jetR) + 'Scaled' + + # Fill jet histograms which are not dependant on angualrity + if self.level in [None, 'ch']: + getattr(self, 'hJetPt_ch_R%s' % R_label).Fill(jch.pt()) + getattr(self, 'hNconstit_Pt_ch_R%s' % R_label).Fill(jch.pt(), len(jch.constituents())) + if self.level in [None, 'h']: + getattr(self, 'hJetPt_h_R%s' % R_label).Fill(jh.pt()) + getattr(self, 'hNconstit_Pt_h_R%s' % R_label).Fill(jh.pt(), len(jh.constituents())) + if self.level in [None, 'p']: + getattr(self, 'hJetPt_p_R%s' % R_label).Fill(jp.pt()) + getattr(self, 'hNconstit_Pt_p_R%s' % R_label).Fill(jp.pt(), len(jp.constituents())) + + if self.level == None: + if jp.pt(): # prevent divide by 0 + getattr(self, 'hJetPtRes_R%s' % R_label).Fill(jp.pt(), (jp.pt() - jch.pt()) / jp.pt()) + getattr(self, 'hResponse_JetPt_R%s' % R_label).Fill(jp.pt(), jch.pt()) + + # Fill angularity histograms and response matrices + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + self.fill_RMs(jetR, gs, gl, jp, jh, jch) + + + #--------------------------------------------------------------- + # Fill jet histograms + #--------------------------------------------------------------- + def fill_RMs(self, jetR, gs, gl, jp, jh, jch): + + # Groomed jets + gshop_chh = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) + jet_ch_groomed_lund = self.utils.groom(gshop_chh, gs, jetR) + if not jet_ch_groomed_lund: + return + + gshop_h 
= fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jet_h_groomed_lund = self.utils.groom(gshop_h, gs, jetR) + if not jet_h_groomed_lund: + return + + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jet_p_groomed_lund = self.utils.groom(gshop_p, gs, jetR) + if not jet_p_groomed_lund: + return + + label = "R%s_%s" % (jetR, gl) + obs_dict = None + for obs in self.observables: + if obs == "theta_g": + obs_dict = { + "p" : jet_p_groomed_lund.Delta() / jetR, + "h" : jet_h_groomed_lund.Delta() / jetR, + "ch": jet_ch_groomed_lund.Delta() / jetR } + elif obs == "zg": + if self.skip_zg: + continue + obs_dict = { + "p" : jet_p_groomed_lund.z(), + "h" : jet_h_groomed_lund.z(), + "ch": jet_ch_groomed_lund.z() } + else: + raise ValueError("Unrecognized observable " + obs) + + if self.level in [None, 'ch']: + getattr(self, 'h_JetPt_%s_ch_MPIoff_%sScaled' % (obs, label)).Fill(jch.pt(), obs_dict['ch']) + + if self.level in [None, 'h']: + getattr(self, 'h_JetPt_%s_h_MPIoff_%sScaled' % (obs, label)).Fill(jh.pt(), obs_dict['h']) + + if self.level in [None, 'p']: + getattr(self, 'h_JetPt_%s_p_MPIoff_%sScaled' % (obs, label)).Fill(jp.pt(), obs_dict['p']) + + if self.level == None: + getattr(self, 'hResponse_%s_p_ch_MPIoff_%sScaled' % (obs, label)).Fill(obs_dict['p'], obs_dict['ch']) + + # Residual plots (with and without divisor in y-axis) + getattr(self, "hDiff_JetPt_%s_MPIoff_%sScaled" % (obs, label)).Fill( + jch.pt(), obs_dict['p'] - obs_dict['ch']) + if obs_dict['p']: # prevent divide by 0 + getattr(self, "hResidual_JetPt_%s_MPIoff_%sScaled" % (obs, label)).Fill( + jp.pt(), (obs_dict['p'] - obs_dict['ch']) / obs_dict['p']) + + # 4D response matrices for "forward folding" to ch level + x = ([jch.pt(), jp.pt(), obs_dict['ch'], obs_dict['p']]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_p_ch_MPIoff_%sScaled' % (obs, label)).Fill(x_array) + + x = ([jh.pt(), jp.pt(), obs_dict['h'], obs_dict['p']]) + x_array = array('d', x) + 
getattr(self, 'hResponse_JetPt_%s_p_h_MPIoff_%sScaled' % (obs, label)).Fill(x_array) + + + #--------------------------------------------------------------- + # Fill jet histograms for MPI (which are just the H-->CH RMs) + #--------------------------------------------------------------- + def fill_jet_histograms_MPI(self, jetR, jp, jh, jch): + + for i, gs in enumerate(self.grooming_settings): + gl = self.grooming_labels[i] + + # Groomed jets + gshop_chh = fjcontrib.GroomerShop(jch, jetR, self.reclustering_algorithm) + jet_ch_groomed_lund = self.utils.groom(gshop_chh, gs, jetR) + if not jet_ch_groomed_lund: + continue + + gshop_h = fjcontrib.GroomerShop(jh, jetR, self.reclustering_algorithm) + jet_h_groomed_lund = self.utils.groom(gshop_h, gs, jetR) + if not jet_h_groomed_lund: + continue + + gshop_p = fjcontrib.GroomerShop(jp, jetR, self.reclustering_algorithm) + jet_p_groomed_lund = self.utils.groom(gshop_p, gs, jetR) + if not jet_p_groomed_lund: + continue + + label = "R%s_%s" % (jetR, gl) + obs_dict = None + for obs in self.observables: + if obs == "theta_g": + obs_dict = { + "p" : jet_p_groomed_lund.Delta() / jetR, + "h" : jet_h_groomed_lund.Delta() / jetR, + "ch": jet_ch_groomed_lund.Delta() / jetR } + elif obs == "zg": + if self.skip_zg: + continue + obs_dict = { + "p" : jet_p_groomed_lund.z(), + "h" : jet_h_groomed_lund.z(), + "ch": jet_ch_groomed_lund.z() } + else: + raise ValueError("Unrecognized observable " + obs) + + # 4D response matrices for "forward folding" from h to ch level + x = ([jch.pt(), jh.pt(), obs_dict['ch'], obs_dict['h']]) + x_array = array('d', x) + getattr(self, 'hResponse_JetPt_%s_h_ch_MPIon_%sScaled' % (obs, label)).Fill(x_array) + + + #--------------------------------------------------------------- + # Initiate scaling of all histograms and print final simulation info + #--------------------------------------------------------------- + def scale_print_final_info(self, pythia, pythia_MPI): + + # Scale all jet histograms by the 
appropriate factor from generated cross section + # and the number of accepted events + if not self.no_scale: + scale_f = pythia.info.sigmaGen() / self.hNevents.GetBinContent(1) + print("Weight MPIoff histograms by (cross section)/(N events) =", scale_f) + MPI_scale_f = pythia_MPI.info.sigmaGen() / self.hNeventsMPI.GetBinContent(1) + print("Weight MPIon histograms by (cross section)/(N events) =", MPI_scale_f) + self.scale_jet_histograms(scale_f, MPI_scale_f) + print() + + print("N total final MPI-off events:", int(self.hNevents.GetBinContent(1)), "with", + int(pythia.info.nAccepted() - self.hNevents.GetBinContent(1)), + "events rejected at hadronization step") + self.hNevents.SetBinError(1, 0) + + print("N total final MPI-on events:", int(self.hNeventsMPI.GetBinContent(1)), "with", + int(pythia_MPI.info.nAccepted() - self.hNeventsMPI.GetBinContent(1)), + "events rejected at hadronization step") + self.hNeventsMPI.SetBinError(1, 0) + + for jetR in self.jetR_list: + jetR_str = str(jetR) + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + print(("For R=%s: %i jets cut at first match criteria; " + \ + "%i jets cut at second match criteria.") % + (str(jetR), count1, count2)) + print() + + + #--------------------------------------------------------------- + # Scale all jet histograms by sigma/N + #--------------------------------------------------------------- + def scale_jet_histograms(self, scale_f, MPI_scale_f): + + for jetR in self.jetR_list: + hist_list_name = "hist_list_R%s" % str(jetR) + for h in getattr(self, hist_list_name): + h.Scale(scale_f) + + hist_list_MPIon_name = "hist_list_MPIon_R%s" % str(jetR) + for h in getattr(self, hist_list_MPIon_name): + h.Scale(MPI_scale_f) + + +################################################################ +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly', + prog=os.path.basename(__file__)) + 
pyconf.add_standard_pythia_args(parser) + # Could use --py-seed + parser.add_argument('--user-seed', help='PYTHIA starting seed', default=1111, type=int) + parser.add_argument('-o', '--output-dir', action='store', type=str, default='./', + help='Output directory for generated ROOT file(s)') + parser.add_argument('--tree-output-fname', default="AnalysisResults.root", type=str, + help="Filename for the (unscaled) generated particle ROOT TTree") + parser.add_argument('--no-tree', default=False, action='store_true', + help="Do not save tree of particle information, only create histograms") + parser.add_argument('--no-match-level', help="Save simulation for only one level with " + \ + "no matching. Options: 'p', 'h', 'ch'", default=None, type=str) + parser.add_argument('--no-scale', help="Turn off rescaling all histograms by cross section / N", + action='store_true', default=False) + parser.add_argument('-c', '--config_file', action='store', type=str, default='config/angularity.yaml', + help="Path of config file for observable configurations") + args = parser.parse_args() + + if args.no_match_level not in [None, 'p', 'h', 'ch']: + print("ERROR: Unrecognized type %s. Please use 'p', 'h', or 'ch'" % args.no_match_level) + exit(1) + + # If invalid configFile is given, exit + if not os.path.exists(args.config_file): + print('File \"{0}\" does not exist! 
Exiting!'.format(args.config_file)) + sys.exit(0) + + # Use PYTHIA seed for event generation + if args.user_seed < 0: + args.user_seed = 1111 + + # Have at least 1 event + if args.nev < 1: + args.nev = 1 + + if args.py_noMPI: + print("\033[91m%s\033[00m" % "WARNING: py-noMPI flag ignored for this program") + time.sleep(3) + print() + + process = pythia_parton_hadron(config_file=args.config_file, output_dir=args.output_dir, args=args) + process.pythia_parton_hadron(args) diff --git a/pyjetty/alice_analysis/process/user/lund/process_data_lund.py b/pyjetty/alice_analysis/process/user/lund/process_data_lund.py new file mode 100755 index 000000000..6feb37b3f --- /dev/null +++ b/pyjetty/alice_analysis/process/user/lund/process_data_lund.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 + +""" + Analysis class to read a ROOT TTree of track information + and do jet-finding, and save basic histograms. + + Author: Ezra Lesser (elesser@berkeley.edu) with much code borrowed + from original script by James Mulligan (james.mulligan@berkeley.edu) +""" + +from __future__ import print_function + +# General +import os +import argparse +import numpy as np +from array import array + +# Data analysis and plotting +import ROOT + +# Fastjet via python (from external library heppy) +import fjext + +# Base class +from pyjetty.alice_analysis.process.user.substructure import process_data_base + +################################################################ +class ProcessData_ang(process_data_base.ProcessDataBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): + + # Initialize base class + super(ProcessData_ang, self).__init__( + input_file, config_file, output_dir, debug_level, **kwargs) + + self.pt_bins = array('d', list(range(5, 301, 1))) + + # groomed jet splitting momentum fraction & angle + 
self.obs_bins_zg = np.linspace(0, 0.5, 101) + self.obs_bins_thetag = np.linspace(0, 1, 101) + + # ln(1/theta) + self.obs_bins_lundx = np.concatenate((np.array([-1000, -100, -10, -1]), np.linspace(0, 2, 81), np.array([10, 100, 1000]))) + # ln(kT) + self.obs_bins_lundy = np.concatenate((np.array([-1000, -100, -10]), np.linspace(-2, 2, 81), np.array([10, 100, 1000]))) + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_user_output_objects(self): + + for jetR in self.jetR_list: + + for observable in self.observable_list: + if observable != "lund": + raise ValueError("Observable %s is not implemented in this script" % observable) + + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + if self.is_pp or self.include_no_subtraction: + name = 'h_zg_thetag_JetPt_R%s_%s' % (jetR, obs_label) + h = ROOT.TH3F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(self.obs_bins_zg)-1, self.obs_bins_zg, + len(self.obs_bins_thetag)-1, self.obs_bins_thetag) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('#it{z}_{g}') + h.GetZaxis().SetTitle('#it{#theta}_{g}') + setattr(self, name, h) + + name = 'h_lund_JetPt_R%s_%s' % (jetR, obs_label) + h = ROOT.TH3F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(self.obs_bins_lundx)-1, self.obs_bins_lundx, + len(self.obs_bins_lundy)-1, self.obs_bins_lundy) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('ln(1/#it{#theta})') + h.GetZaxis().SetTitle('ln(#it{k}_{T}/GeV)') + setattr(self, name, h) + + if not self.is_pp: + # Pb-Pb: have several R_max for contituent subtraction + max_distance = self.max_distance if isinstance(self.max_distance, list) \ + else self.max_distance[jetR] + for R_max in 
max_distance: + name = 'h_zg_thetag_JetPt_R%s_%s_Rmax%s' % (jetR, obs_label, R_max) + h = ROOT.TH3F(name, name, len(self.pt_bins)-1, self.pt_bins, + len(self.obs_bins_zg)-1, self.obs_bins_zg, + len(self.obs_bins_thetag)-1, self.obs_bins_thetag) + h.GetXaxis().SetTitle('#it{p}_{T}^{ch jet}') + h.GetYaxis().SetTitle('#it{z}_{g}') + h.GetZaxis().SetTitle('#it{#theta}_{g}') + setattr(self, name, h) + + #--------------------------------------------------------------- + # This function is called once for each jet subconfiguration + #--------------------------------------------------------------- + def fill_jet_histograms(self, observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_ungroomed, suffix): + + # Calculate observable + zg = jet_groomed_lund.z() + thetag = jet_groomed_lund.Delta() / jetR + # Values are already returned negative if jet_groomed_lund is empty + #if not jet_groomed_lund.pair().has_constituents() + + # Fill histograms + getattr(self, "h_zg_thetag_JetPt_R%s_%s%s" % (jetR, obs_label, suffix)).Fill(jet_pt_ungroomed, zg, thetag) + + # Fill jet lund planes + name = "h_lund_JetPt_R%s_%s%s" % (jetR, obs_label, suffix) + split = jet_groomed_lund + if split.pair().has_constituents(): + lnth = np.log(jetR / split.Delta()) + lnkt = np.log(split.kt()) + getattr(self, name).Fill(jet_pt_ungroomed, lnth, lnkt) + else: # Untagged jet -- add underflow value + getattr(self, name).Fill(jet_pt_ungroomed, -1e9, -1e9) + + +################################################################## +if __name__ == '__main__': + # Define arguments + parser = argparse.ArgumentParser(description='Process data') + parser.add_argument('-f', '--inputFile', action='store', + type=str, metavar='inputFile', + default='AnalysisResults.root', + help='Path of ROOT file containing TTrees') + parser.add_argument('-c', '--configFile', action='store', + type=str, metavar='configFile', + default='config/analysis_config.yaml', + help="Path of config file for 
analysis") + parser.add_argument('-o', '--outputDir', action='store', + type=str, metavar='outputDir', + default='./TestOutput', + help='Output directory for output to be written to') + + # Parse the arguments + args = parser.parse_args() + + print('Configuring...') + print('inputFile: \'{0}\''.format(args.inputFile)) + print('configFile: \'{0}\''.format(args.configFile)) + print('outputDir: \'{0}\''.format(args.outputDir)) + print('----------------------------------------------------------------') + + # If invalid inputFile is given, exit + if not os.path.exists(args.inputFile): + raise ValueError("File \"%s\" does not exist" % args.inputFile) + + # If invalid configFile is given, exit + if not os.path.exists(args.configFile): + raise ValueError("File \"%s\" does not exist" % args.configFile) + + analysis = ProcessData_ang( + input_file=args.inputFile, config_file=args.configFile, output_dir=args.outputDir) + analysis.process_data() diff --git a/pyjetty/alice_analysis/process/user/substructure/process_data_base.py b/pyjetty/alice_analysis/process/user/substructure/process_data_base.py index c973dd622..5c11150a8 100755 --- a/pyjetty/alice_analysis/process/user/substructure/process_data_base.py +++ b/pyjetty/alice_analysis/process/user/substructure/process_data_base.py @@ -3,21 +3,21 @@ """ Base class to read a ROOT TTree of track information and do jet-finding, and save basic histograms. - + To use this class, the following should be done: - Implement a user analysis class inheriting from this one, such as in user/james/process_data_XX.py You should implement the following functions: - initialize_user_output_objects() - fill_jet_histograms() - + - The histogram of the data should be named h_[obs]_JetPt_R[R]_[subobs]_[grooming setting] The grooming part is optional, and should be labeled e.g. 
zcut01_B0 — from CommonUtils::grooming_label({'sd':[zcut, beta]}) For example: h_subjet_z_JetPt_R0.4_0.1 For example: h_subjet_z_JetPt_R0.4_0.1_zcut01_B0 - You also should modify observable-specific functions at the top of common_utils.py - + Author: James Mulligan (james.mulligan@berkeley.edu) """ @@ -50,46 +50,54 @@ class ProcessDataBase(process_base.ProcessBase): # Constructor #--------------------------------------------------------------- def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): - + # Initialize base class super(ProcessDataBase, self).__init__(input_file, config_file, output_dir, debug_level, **kwargs) # Initialize configuration self.initialize_config() - + #--------------------------------------------------------------- # Initialize config file into class members #--------------------------------------------------------------- def initialize_config(self): - + # Call base class initialization process_base.ProcessBase.initialize_config(self) - + # Read config file with open(self.config_file, 'r') as stream: config = yaml.safe_load(stream) - + if self.do_constituent_subtraction: self.is_pp = False else: self.is_pp = True - + + # Whether or not to require jets to contain a track with some leading track pT + self.min_leading_track_pT = config["min_leading_track_pT"] if "min_leading_track_pT" in config else None + # Create dictionaries to store grooming settings and observable settings for each observable # Each dictionary entry stores a list of subconfiguration parameters # The observable list stores the observable setting, e.g. 
subjetR # The grooming list stores a list of grooming settings {'sd': [zcut, beta]} or {'dg': [a]} + # The observable names list stores the LaTeX formatted name of the observable for plots self.observable_list = config['process_observables'] self.obs_settings = {} self.obs_grooming_settings = {} + self.obs_names = {} for observable in self.observable_list: - + obs_config_dict = config[observable] obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] - + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] - self.obs_settings[observable] = self.utils.obs_settings(observable, obs_config_dict, obs_subconfig_list) + self.obs_settings[observable] = self.utils.obs_settings( + observable, obs_config_dict, obs_subconfig_list) self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) - + + self.obs_names[observable] = obs_config_dict["common_settings"]["xtitle"] + # Construct set of unique grooming settings self.grooming_settings = [] lists_grooming = [self.obs_grooming_settings[obs] for obs in self.observable_list] @@ -97,15 +105,16 @@ def initialize_config(self): for setting in observable: if setting not in self.grooming_settings and setting != None: self.grooming_settings.append(setting) - + #--------------------------------------------------------------- # Main processing function #--------------------------------------------------------------- def process_data(self): - + self.start_time = time.time() - # Use IO helper class to convert ROOT TTree into a SeriesGroupBy object of fastjet particles per event + # Use IO helper class to convert ROOT TTree into a SeriesGroupBy object of + # fastjet particles per event print('--- {} seconds ---'.format(time.time() - self.start_time)) io = process_io.ProcessIO(input_file=self.input_file, track_tree_name='tree_Particle', is_pp=self.is_pp, use_ev_id_ext=True) @@ -113,49 +122,55 @@ def process_data(self): self.nEvents = 
len(self.df_fjparticles.index) self.nTracks = len(io.track_df.index) print('--- {} seconds ---'.format(time.time() - self.start_time)) - + # Initialize histograms self.initialize_output_objects() - + # Create constituent subtractor, if configured if not self.is_pp: - self.constituent_subtractor = [CEventSubtractor(max_distance=R_max, alpha=self.alpha, max_eta=self.max_eta, bge_rho_grid_size=self.bge_rho_grid_size, max_pt_correct=self.max_pt_correct, ghost_area=self.ghost_area, distance_type=fjcontrib.ConstituentSubtractor.deltaR) for R_max in self.max_distance] - + max_dist_li = self.max_distance if isinstance(self.max_distance, list) else \ + list(np.unique(np.concatenate(list(self.max_distance.values())))) + self.constituent_subtractor = { R_max : CEventSubtractor( + max_distance=R_max, alpha=self.alpha, max_eta=self.max_eta, + bge_rho_grid_size=self.bge_rho_grid_size, max_pt_correct=self.max_pt_correct, + ghost_area=self.ghost_area, distance_type=fjcontrib.ConstituentSubtractor.deltaR) \ + for R_max in max_dist_li} + print(self) # Find jets and fill histograms print('Analyze events...') self.analyze_events() - + # Plot histograms print('Save histograms...') process_base.ProcessBase.save_output_objects(self) print('--- {} seconds ---'.format(time.time() - self.start_time)) - + #--------------------------------------------------------------- # Initialize histograms #--------------------------------------------------------------- def initialize_output_objects(self): - + # Initialize user-specific histograms self.initialize_user_output_objects() - + # Initialize base histograms self.hNevents = ROOT.TH1F('hNevents', 'hNevents', 2, -0.5, 1.5) if self.event_number_max < self.nEvents: self.hNevents.Fill(1, self.event_number_max) else: self.hNevents.Fill(1, self.nEvents) - + self.hTrackEtaPhi = ROOT.TH2F('hTrackEtaPhi', 'hTrackEtaPhi', 200, -1., 1., 628, 0., 6.28) self.hTrackPt = ROOT.TH1F('hTrackPt', 'hTrackPt', 300, 0., 300.) 
- + if not self.is_pp: self.hRho = ROOT.TH1F('hRho', 'hRho', 1000, 0., 1000.) - + for jetR in self.jetR_list: - + name = 'hZ_R{}'.format(jetR) h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0., 1.) setattr(self, name, h) @@ -164,59 +179,63 @@ def initialize_output_objects(self): # Main function to loop through and analyze events #--------------------------------------------------------------- def analyze_events(self): - + # Fill track histograms print('--- {} seconds ---'.format(time.time() - self.start_time)) print('Fill track histograms') - [[self.fillTrackHistograms(track) for track in fj_particles] for fj_particles in self.df_fjparticles] + for fj_particles in self.df_fjparticles: + for track in fj_particles: + self.fillTrackHistograms(track) print('--- {} seconds ---'.format(time.time() - self.start_time)) - + print('Find jets...') fj.ClusterSequence.print_banner() print() self.event_number = 0 - - # Use list comprehension to do jet-finding and fill histograms - result = [self.analyze_event(fj_particles) for fj_particles in self.df_fjparticles] - + + # Do jet-finding and fill histograms + for fj_particles in self.df_fjparticles: + self.analyze_event(fj_particles) + print('--- {} seconds ---'.format(time.time() - self.start_time)) print('Save thn...') process_base.ProcessBase.save_thn_th3_objects(self) - + #--------------------------------------------------------------- # Fill track histograms. #--------------------------------------------------------------- def fillTrackHistograms(self, track): - + self.hTrackEtaPhi.Fill(track.eta(), track.phi()) self.hTrackPt.Fill(track.pt()) - + #--------------------------------------------------------------- # Analyze jets of a given event. # fj_particles is the list of fastjet pseudojets for a single fixed event. 
#--------------------------------------------------------------- def analyze_event(self, fj_particles): - + self.event_number += 1 if self.event_number > self.event_number_max: return if self.debug_level > 1: print('-------------------------------------------------') print('event {}'.format(self.event_number)) - + if len(fj_particles) > 1: if np.abs(fj_particles[0].pt() - fj_particles[1].pt()) < 1e-10: print('WARNING: Duplicate particles may be present') print([p.user_index() for p in fj_particles]) print([p.pt() for p in fj_particles]) - + # Perform constituent subtraction for each R_max (do this once, for all jetR) if not self.is_pp: - fj_particles_subtracted = [self.constituent_subtractor[i].process_event(fj_particles) for i, R_max in enumerate(self.max_distance)] - + fj_particles_subtracted = { R_max : cs.process_event(fj_particles) for \ + R_max, cs in self.constituent_subtractor.items() } + # Loop through jetR, and process event for each R for jetR in self.jetR_list: - + # Keep track of whether to fill R-independent histograms self.fill_R_indep_hists = (jetR == self.jetR_list[0]) @@ -225,107 +244,113 @@ def analyze_event(self, fj_particles): jet_selector = fj.SelectorPtMin(5.0) & fj.SelectorAbsRapMax(0.9 - jetR) if self.debug_level > 2: print('jet definition is:', jet_def) - print('jet selector is:', jet_selector,'\n') - + print('jet selector is:', jet_selector) + if self.min_leading_track_pT: + print('*** Requiring minimum leading track pT:', self.min_leading_track_pT) + print() + # Analyze - if self.is_pp: - + if self.is_pp or self.include_no_subtraction: + # Do jet finding cs = fj.ClusterSequence(fj_particles, jet_def) jets = fj.sorted_by_pt(cs.inclusive_jets()) jets_selected = jet_selector(jets) - + self.analyze_jets(jets_selected, jetR) - - else: - - for i, R_max in enumerate(self.max_distance): - + + if not self.is_pp: + + max_distance = self.max_distance if isinstance(self.max_distance, list) else \ + self.max_distance[jetR] + + for R_max in 
max_distance: + if self.debug_level > 1: print('R_max: {}'.format(R_max)) - + # Keep track of whether to fill R_max-independent histograms - self.fill_Rmax_indep_hists = (i == 0) - + self.fill_Rmax_indep_hists = (R_max == max_distance[0]) + # Perform constituent subtraction - rho = self.constituent_subtractor[i].bge_rho.rho() + rho = self.constituent_subtractor[R_max].bge_rho.rho() if self.fill_R_indep_hists and self.fill_Rmax_indep_hists: getattr(self, 'hRho').Fill(rho) - + # Do jet finding (re-do each time, to make sure matching info gets reset) - cs = fj.ClusterSequence(fj_particles_subtracted[i], jet_def) + cs = fj.ClusterSequence(fj_particles_subtracted[R_max], jet_def) jets = fj.sorted_by_pt(cs.inclusive_jets()) jets_selected = jet_selector(jets) - + self.analyze_jets(jets_selected, jetR, R_max = R_max) - + #--------------------------------------------------------------- # Analyze jets of a given event. #--------------------------------------------------------------- def analyze_jets(self, jets_selected, jetR, R_max = None): - + # Set suffix for filling histograms if R_max: suffix = '_Rmax{}'.format(R_max) else: suffix = '' - + # Loop through jets and call user function to fill histos - result = [self.analyze_accepted_jet(jet, jetR, suffix) for jet in jets_selected] - + for jet in jets_selected: + self.analyze_accepted_jet(jet, jetR, suffix) + #--------------------------------------------------------------- # Fill histograms #--------------------------------------------------------------- def analyze_accepted_jet(self, jet, jetR, suffix): - + # Check additional acceptance criteria - if not self.utils.is_det_jet_accepted(jet): + if not self.utils.is_det_jet_accepted(jet, self.min_leading_track_pT): return - + # Fill base histograms jet_pt_ungroomed = jet.pt() - if self.is_pp or self.fill_Rmax_indep_hists: - + if self.is_pp or (len(suffix) and self.fill_Rmax_indep_hists): + hZ = getattr(self, 'hZ_R{}'.format(jetR)) for constituent in jet.constituents(): z = 
constituent.pt() / jet_pt_ungroomed hZ.Fill(jet_pt_ungroomed, z) - + # Loop through each jet subconfiguration (i.e. subobservable / grooming setting) - # Note that the subconfigurations are defined by the first observable, if multiple are defined - observable = self.observable_list[0] - for i in range(len(self.obs_settings[observable])): - - obs_setting = self.obs_settings[observable][i] - grooming_setting = self.obs_grooming_settings[observable][i] - obs_label = self.utils.obs_label(obs_setting, grooming_setting) - - # Groom jet, if applicable - if grooming_setting: - gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) - jet_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) - if not jet_groomed_lund: - continue - else: - jet_groomed_lund = None - - # Call user function to fill histograms - self.fill_jet_histograms(jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, - obs_label, jet_pt_ungroomed, suffix) + for observable in self.observable_list: + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + # Groom jet, if applicable + if grooming_setting: + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) + if not jet_groomed_lund: + continue + else: + jet_groomed_lund = None + + # Call user function to fill histograms + self.fill_jet_histograms(observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_ungroomed, suffix) #--------------------------------------------------------------- # This function is called once # You must implement this #--------------------------------------------------------------- def initialize_user_output_objects(self): - + raise NotImplementedError('You must implement initialize_user_output_objects()!') 
#--------------------------------------------------------------- # This function is called once for each jet subconfiguration # You must implement this #--------------------------------------------------------------- - def fill_jet_histograms(self, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, - obs_label, jet_pt_ungroomed, suffix): - + def fill_jet_histograms(self, observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_ungroomed, suffix): + raise NotImplementedError('You must implement fill_jet_histograms()!') diff --git a/pyjetty/alice_analysis/process/user/substructure/process_jewel_generated_base.py b/pyjetty/alice_analysis/process/user/substructure/process_jewel_generated_base.py old mode 100644 new mode 100755 index d9ee80fd9..dee910638 --- a/pyjetty/alice_analysis/process/user/substructure/process_jewel_generated_base.py +++ b/pyjetty/alice_analysis/process/user/substructure/process_jewel_generated_base.py @@ -66,11 +66,11 @@ def __init__(self, input_file='', config_file='', output_dir='', **kwargs): self.gridsizes = None if 'thermal_subtraction_method' in config: - self.thermal_subtraction_method = config['thermal_subtraction_method'] + self.thermal_subtraction_method = config['thermal_subtraction_method'].lower() if not self.thermal_subtraction_method: print('Will not do recoil subtraction') - elif 'gridsub' in self.thermal_subtraction_method.lower(): + elif 'gridsub' in self.thermal_subtraction_method: if 'gridsizes' in config: self.gridsizes = config['gridsizes'] else: @@ -102,14 +102,14 @@ def __init__(self, input_file='', config_file='', output_dir='', **kwargs): print('Constituent subtractor is enabled.') constituent_subtractor = config['constituent_subtractor'] - + max_distance = constituent_subtractor['max_distance'] alpha = constituent_subtractor['alpha'] max_eta = constituent_subtractor['max_eta'] bge_rho_grid_size = constituent_subtractor['bge_rho_grid_size'] max_pt_correct = 
constituent_subtractor['max_pt_correct'] ghost_area = constituent_subtractor['ghost_area'] - + self.constituent_subtractor = CEventSubtractor(max_distance=max_distance, alpha=alpha, max_eta=max_eta, @@ -129,11 +129,17 @@ def __init__(self, input_file='', config_file='', output_dir='', **kwargs): self.observable_list = config['process_observables'] self.obs_settings = {} self.obs_grooming_settings = {} + self.obs_names = {} for observable in self.observable_list: obs_config_dict = config[observable] obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + if "common_settings" in list(obs_config_dict.keys()) and \ + "xtitle" in list(obs_config_dict["common_settings"].keys()): + + self.obs_names[observable] = obs_config_dict["common_settings"]["xtitle"] + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] self.obs_settings[observable] = self.utils.obs_settings(observable, obs_config_dict, obs_subconfig_list) self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) @@ -149,15 +155,23 @@ def initialize_histos(self): if not self.thermal_subtraction_method: print('no histograms will be initialized for this setting in initialize_histos') - elif 'gridsub' in self.thermal_subtraction_method.lower(): + + elif 'gridsub' in self.thermal_subtraction_method: for jetR in self.jetR_list: for gridsize in self.gridsizes: name = 'h_thermal_fraction_not_subtracted_v_pT_R{}_gridsize{}'.format(jetR,gridsize) h = ROOT.TH2F(name,name,100,0,200,100,0,1.01) setattr(self,name,h) - elif '4momsub' in self.thermal_subtraction_method.lower(): + + elif '4momsub' in self.thermal_subtraction_method or \ + 'negative_recombiner' in self.thermal_subtraction_method: + print('no histograms will be initialized for this setting in initialize_histos') + else: + raise NotImplementedError( + "Subtraction method %s not implemented" % self.thermal_subtraction_method) + 
#--------------------------------------------------------------- # Main function #--------------------------------------------------------------- @@ -205,16 +219,21 @@ def load_data(self,input_fname): track_criteria = 'ParticlePt > 1e-5 and ParticleEta < 0.9 and ParticleEta > -0.9' if not self.recoils_off: track_criteria += ' and Status != 3' - bck_criteria = 'ParticlePt > 1e-5 and ParticleEta < 0.9 and ParticleEta > -0.9 and Status == 3' # Not used by default, unless constituent subtraction is activated or user accesses thermal particles in user function - elif 'gridsub' in self.thermal_subtraction_method.lower(): + # Not used by default, unless constituent subtraction is activated or + # user accesses thermal particles in user function + bck_criteria = 'ParticlePt > 1e-5 and ParticleEta < 0.9 and ParticleEta > -0.9 and Status == 3' + elif 'gridsub' in self.thermal_subtraction_method: track_criteria = 'ParticlePt > 1e-5 and ParticleEta < 0.9 and ParticleEta > -0.9 and Status != 3' bck_criteria = 'ParticlePt > 1e-5 and ParticleEta < 0.9 and ParticleEta > -0.9 and Status == 3' - elif '4momsub' in self.thermal_subtraction_method.lower(): + elif '4momsub' in self.thermal_subtraction_method: track_criteria = 'ParticleEta < 0.9 and ParticleEta > -0.9 and Status != 3' bck_criteria = 'ParticleEta < 0.9 and ParticleEta > -0.9 and Status == 3' + elif 'negative_recombiner' in self.thermal_subtraction_method: + track_criteria = 'ParticlePt > 1e-5 and ParticleEta < 0.9 and ParticleEta > -0.9 and Status != 3' + bck_criteria = 'ParticlePt > 1e-5 and ParticleEta < 0.9 and ParticleEta > -0.9 and Status == 3' self.jet_df = self.track_df.query(track_criteria) - self.bck_df = self.track_df.query(bck_criteria) + self.bck_df = None if self.recoils_off else self.track_df.query(bck_criteria) self.ev_idx = [] print('Transforming track dataframe into SeriesGroupBy object of fastjet particles per event.') @@ -226,7 +245,7 @@ def load_data(self,input_fname): # This function was adapted from 
alice_analysis/process/base/process_io.py #--------------------------------------------------------------- def group_fjparticles(self,df): - print("Transform the track dataframe into a series object of fastjet particles per event...") + print("Transform the track dataframe into a series object of fastjet particles per event...") # (i) Group dataframe by event track_df_grouped is a DataFrameGroupBy object with one track dataframe per event df_grouped = None df_grouped = df.groupby(['run_number', 'ev_id']) @@ -264,15 +283,8 @@ def analyze_events(self): print() self.event_number = 0 - # Use list comprehension to do jet-finding and fill histograms - if not self.thermal_subtraction_method or '4momsub' in self.thermal_subtraction_method.lower(): - if not self.thermal_subtraction_method: - print('No recoil removal method selected') - else: - print('Using 4MomSub method to remove recoils') - result = [self.analyze_event(fj_particles) for fj_particles in self.df_fjparticles] - print('--- {} seconds ---'.format(time.time() - self.start_time)) - elif 'gridsub' in self.thermal_subtraction_method.lower(): + # Do jet-finding and fill histograms + if self.thermal_subtraction_method and 'gridsub' in self.thermal_subtraction_method: print('Using GridSub method to remove recoils') for gridsize in self.gridsizes: print('Now doing the analysis for a gridsize =',gridsize) @@ -280,58 +292,77 @@ def analyze_events(self): self.populated_cells_w_constit = [] self.grid_dict[gridsize] = self.create_grids(gridsize) self.event_number = 0 - result = [self.analyze_event(fj_particles,gridsize) for fj_particles in self.df_fjparticles] - - if self.run_diagnostics: - self.event_number = 0 - result_diagnostics = [self.analyze_event(fj_particles,gridsize,True) for fj_particles in self.df_fjparticles] + for fj_particles in self.df_fjparticles: + self.analyze_event(fj_particles,gridsize) + if self.run_diagnostics: + self.event_number = 0 + self.analyze_event(fj_particles, gridsize, True) del 
self.grid_dict[gridsize] print('--- {} seconds ---'.format(time.time() - self.start_time)) + else: + if not self.thermal_subtraction_method: + print('No recoil removal method selected') + else: + print('Using %s method to remove recoils' % self.thermal_subtraction_method) + for fj_particles in self.df_fjparticles: + self.analyze_event(fj_particles) + print('--- {} seconds ---'.format(time.time() - self.start_time)) + #--------------------------------------------------------------- # Analyze jets of a given event. # fj_particles is the list of fastjet pseudojets for a single fixed event. #--------------------------------------------------------------- def analyze_event(self, fj_particles, gridsize=None, diagnostic=False): - self.event_bck_df = None - self.event_bck_df = self.bck_df[self.bck_df['ev_id']==self.ev_idx[self.event_number]] + if self.event_number > self.nEvents: + return + + try: + self.event_bck_df = self.bck_df[self.bck_df['ev_id'] == self.ev_idx[self.event_number]] + except: + self.event_bck_df = None #print(self.event_number) self.event_number += 1 - if self.event_number > self.nEvents: - return + + # Convert df of pt/eta/phi of thermal particles to list of + # fastjet particles, for convenience + thermal_particles = [] + thermal_particles_selected = [] + try: + if len(self.event_bck_df): + thermal_particles = self.get_fjparticles(self.event_bck_df) + # Drop specified fraction of thermal particles + # Loop manually since the wrapped functions are a bit funky + for i, p in enumerate(thermal_particles): + if np.random.uniform() >= self.thermal_rejection_fraction: + thermal_particles_selected.append(p) + #print(f'n_thermals before: {len(thermal_particles)}') + #print(f'n_thermals after: {len(thermal_particles_selected)}') + except TypeError: + # This means that self.event_bck_df = None + pass if len(fj_particles) > 1: - if np.abs(fj_particles[0].eta() - fj_particles[1].eta()) < 1e-10: + if np.abs(fj_particles[0].eta() - fj_particles[1].eta()) < 1e-10: if 
fj_particles[0].pt() < 1e-5: - print('WARNING: Duplicate DUMMY particles may be present in event',self.event_number) + print('WARNING: Duplicate DUMMY particles may be present in event', + self.event_number) else: - print('WARNING: Duplicate JET particles may be present in event',self.event_number) + print('WARNING: Duplicate JET particles may be present in event', + self.event_number) #[print(p.eta(),p.pt()) for p in fj_particles] # If constituent subtraction is enabled, perform subtraction on the event if self.constituent_subtractor: - # Convert df of pt/eta/phi of thermal particles to list of fastjet particles, for convenience - thermal_particles = [] - if len(self.event_bck_df) != 0: - thermal_particles = self.get_fjparticles(self.event_bck_df) - - # Drop specified fraction of thermal particles -- loop manually since the wrapped functions are a bit funky - thermal_particles_selected = [] - for i,p in enumerate(thermal_particles): - if np.random.uniform() >= self.thermal_rejection_fraction: - thermal_particles_selected.append(p) - #print(f'n_thermals before: {len(thermal_particles)}') - #print(f'n_thermals after: {len(thermal_particles_selected)}') - # Determine rho from thermal particles self.constituent_subtractor.bge_rho.set_particles(thermal_particles_selected) # Perform subtraction over full event (jet+recoil) fj_particles = self.constituent_subtractor.subtractor.subtract_event(fj_particles) - + #rho = self.constituent_subtractor.bge_rho.rho() #print(f'rho: {rho}') #print() @@ -342,14 +373,25 @@ def analyze_event(self, fj_particles, gridsize=None, diagnostic=False): jet_def = fj.JetDefinition(fj.antikt_algorithm, jetR) jet_selector = fj.SelectorPtMin(5.0) & fj.SelectorAbsRapMax(0.9 - jetR) + particles = fj_particles + # For negative pT treatment, add thermals and negative recombiner + if self.thermal_subtraction_method and \ + "negative_recombiner" in self.thermal_subtraction_method: + + for part in thermal_particles_selected: + part.set_user_index(-1) + 
particles.push_back(part) + + recombiner = fjext.NegativeEnergyRecombiner(-1) + jet_def.set_recombiner(recombiner) + # Do jet finding - jets_selected = None - cs = fj.ClusterSequence(fj_particles, jet_def) + cs = fj.ClusterSequence(particles, jet_def) jets = fj.sorted_by_pt(cs.inclusive_jets()) jets_selected = jet_selector(jets) # If no jets were selected, move on to next event - if len(jets_selected)==0: + if len(jets_selected) == 0: continue #---------------------------------------------- @@ -358,7 +400,11 @@ def analyze_event(self, fj_particles, gridsize=None, diagnostic=False): if not self.thermal_subtraction_method: subtracted_jets = jets_selected - elif '4momsub' in self.thermal_subtraction_method.lower(): + + elif "negative_recombiner" in self.thermal_subtraction_method: + subtracted_jets = jets_selected + + elif '4momsub' in self.thermal_subtraction_method: ''' ------------------------------------------------ Thermal subtraction using 4MomSub method @@ -379,7 +425,7 @@ def analyze_event(self, fj_particles, gridsize=None, diagnostic=False): if len(subtracted_jets) > 1: print('WARNING: Got more than one subtracted jet out of one input jet') - elif 'gridsub' in self.thermal_subtraction_method.lower(): + elif 'gridsub' in self.thermal_subtraction_method: ''' ------------------------------------------------ Thermal subtraction using GridSub1 method @@ -402,9 +448,14 @@ def analyze_event(self, fj_particles, gridsize=None, diagnostic=False): cs = fj.ClusterSequence(fj_subtracted_constituents, jet_def) subtracted_jets = fj.sorted_by_pt(cs.inclusive_jets()) + else: + raise NotImplementedError("Thermal subtraction method %s not recognized" % \ + self.thermal_subtraction_method) + #---------------------------------------------- - result = [self.analyze_accepted_jet(jet, jetR, gridsize, diagnostic) for jet in subtracted_jets] + for i, jet in enumerate(subtracted_jets): + self.analyze_accepted_jet(jet, jetR, gridsize, diagnostic) 
#--------------------------------------------------------------- # Fill histograms @@ -413,30 +464,42 @@ def analyze_accepted_jet(self, jet, jetR, gridsize, diagnostic=False): # Fill base histograms jet_pt_ungroomed = jet.pt() - + # Loop through each jet subconfiguration (i.e. subobservable / grooming setting) # Note that the subconfigurations are defined by the first observable, if multiple are defined - observable = self.observable_list[0] - for i in range(len(self.obs_settings[observable])): - + for observable in self.observable_list: + for i in range(len(self.obs_settings[observable])): obs_setting = self.obs_settings[observable][i] grooming_setting = self.obs_grooming_settings[observable][i] obs_label = self.utils.obs_label(obs_setting, grooming_setting) # Groom jet, if applicable + jet_def = fj.JetDefinition(self.reclustering_algorithm, jetR) if grooming_setting: - gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + # For negative_recombiner case, we set the negative recombiner + #if self.thermal_subtraction_method and \ + # "negative_recombiner" in self.thermal_subtraction_method: + + recombiner = fjext.NegativeEnergyRecombiner(-1) + jet_def.set_recombiner(recombiner) + + gshop = fjcontrib.GroomerShop(jet, jet_def) jet_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) if not jet_groomed_lund: continue + else: jet_groomed_lund = None # Call user function to fill histograms if not diagnostic: - self.fill_jet_histograms(jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed, gridsize) + self.fill_jet_histograms( + observable, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, + obs_label, jet_pt_ungroomed, gridsize) else: - self.fill_jet_histograms(jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed, gridsize, '_diagnostics') + self.fill_jet_histograms( + observable, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, + obs_label, 
jet_pt_ungroomed, gridsize, suffix='_diagnostics') #--------------------------------------------------------------- # GridSub1 subtraction method @@ -459,20 +522,23 @@ def subtract_thermal_gridsub1(self,jets,jetR,gridsize,diagnostic=False): jet_phi = jet.phi() if jet_phi > math.pi: - jet_phi -= 2.*math.pi + jet_phi -= 2. * math.pi # Add jet particles to grid - [self.populate_grid_with_constituents(constit,gridsize) for constit in jet.constituents()] + for constit in jet.constituents(): + self.populate_grid_with_constituents(constit, gridsize) # Subtract thermal particles from grid if not diagnostic: - [self.subtract_thermals_from_grid(th,gridsize,jet_eta,jet_phi,jetR) for th in range(0,len(self.event_bck_df))] - - - if self.total_thermal_momentum > 0 and not diagnostic: - unsubtracted_thermal_pT_fraction = self.unsubtracted_thermal_momentum/self.total_thermal_momentum - name = 'h_thermal_fraction_not_subtracted_v_pT_R{}_gridsize{}'.format(jetR,gridsize) - getattr(self,name).Fill(jet_pt,unsubtracted_thermal_pT_fraction) + for th in range(0, len(self.event_bck_df)): + self.subtract_thermals_from_grid(th, gridsize, jet_eta, jet_phi, jetR) + + if self.total_thermal_momentum > 0: + unsubtracted_thermal_pT_fraction = \ + self.unsubtracted_thermal_momentum / self.total_thermal_momentum + name = 'h_thermal_fraction_not_subtracted_v_pT_R{}_gridsize{}'.format( + jetR, gridsize) + getattr(self, name).Fill(jet_pt, unsubtracted_thermal_pT_fraction) # Create dataframe to store the surviving constituents jet_df = pd.DataFrame(columns = ['ParticlePt', 'ParticleEta', 'ParticlePhi','m']) @@ -647,8 +713,10 @@ def save_output_objects(self,option='recreate'): # This function is called once for each jet subconfiguration # You must implement this #--------------------------------------------------------------- - def fill_jet_histograms(self, jet, jet_groomed_lund, jetR, obs_setting, grooming_setting, - obs_label, jet_pt_ungroomed, suffix=None): + def fill_jet_histograms( + self, jet, 
jet_groomed_lund, jetR, obs_setting, grooming_setting, obs_label, + jet_pt_ungroomed, suffix=None): + raise NotImplementedError('You must implement fill_jet_histograms()!') #--------------------------------------------------------------- @@ -669,7 +737,7 @@ def subtract_thermal_4momsub(self,jet,jetR,diagnostic=False): for constit in jet.constituents(): if constit.pt() < 1e-5: - for th in range(0,len(self.event_bck_df)): + for th in range(0, len(self.event_bck_df)): th_eta = self.event_bck_df['ParticleEta'].iloc[th] th_phi = self.event_bck_df['ParticlePhi'].iloc[th] th_pt = self.event_bck_df['ParticlePt' ].iloc[th] @@ -709,4 +777,4 @@ def subtract_thermal_4momsub(self,jet,jetR,diagnostic=False): jet_df['ParticlePt'].values, jet_df['ParticleEta'].values, jet_df['ParticlePhi'].values, jet_df['m'].values, 0) - return fj_particles \ No newline at end of file + return fj_particles diff --git a/pyjetty/alice_analysis/process/user/substructure/process_mc_base.py b/pyjetty/alice_analysis/process/user/substructure/process_mc_base.py index 9c0cf7ae4..e25290147 100755 --- a/pyjetty/alice_analysis/process/user/substructure/process_mc_base.py +++ b/pyjetty/alice_analysis/process/user/substructure/process_mc_base.py @@ -3,21 +3,22 @@ """ Base class to read a ROOT TTree of track information and do jet-finding, and save basic histograms. 
- + To use this class, the following should be done: - Implement a user analysis class inheriting from this one, such as in user/james/process_mc_XX.py You should implement the following functions: + - (optional) calculate_observable() - initialize_user_output_objects_R() - fill_observable_histograms() - fill_matched_jet_histograms() - + - You should include the following histograms: - Response matrix: hResponse_JetPt_[obs]_R[R]_[subobs]_[grooming setting] - Residual distribution: hResidual_JetPt_[obs]_R[R]_[subobs]_[grooming setting] - You also should modify observable-specific functions at the top of common_utils.py - + Author: James Mulligan (james.mulligan@berkeley.edu) """ @@ -33,6 +34,9 @@ import ROOT import yaml import random +import argparse +import os +import sys # Fastjet via python (from external library heppy) import fastjet as fj @@ -46,6 +50,12 @@ from pyjetty.alice_analysis.process.base import thermal_generator from pyjetty.mputils import CEventSubtractor +# For generating pythia tests +from heppy.pythiautils import configuration as pyconf +import pythia8 +import pythiafjext +import pythiaext + # Prevent ROOT from stealing focus when plotting ROOT.gROOT.SetBatch(True) @@ -56,26 +66,26 @@ class ProcessMCBase(process_base.ProcessBase): # Constructor #--------------------------------------------------------------- def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): - + # Initialize base class super(ProcessMCBase, self).__init__(input_file, config_file, output_dir, debug_level, **kwargs) - + # Initialize configuration self.initialize_config() - + #--------------------------------------------------------------- # Initialize config file into class members #--------------------------------------------------------------- def initialize_config(self): - + # Call base class initialization process_base.ProcessBase.initialize_config(self) - + # Read config file with open(self.config_file, 'r') as stream: config = 
yaml.safe_load(stream) - - self.fast_simulation = config['fast_simulation'] + + self.fast_simulation = config['fast_simulation'] if 'fast_simulation' in config else False if 'jetscape' in config: self.jetscape = config['jetscape'] else: @@ -88,22 +98,22 @@ def initialize_config(self): self.matching_systematic = config['matching_systematic'] else: self.matching_systematic = False - self.dry_run = config['dry_run'] + self.dry_run = config['dry_run'] if 'dry_run' in config else False self.skip_deltapt_RC_histograms = True self.fill_RM_histograms = True - + self.jet_matching_distance = config['jet_matching_distance'] self.reject_tracks_fraction = config['reject_tracks_fraction'] if 'mc_fraction_threshold' in config: self.mc_fraction_threshold = config['mc_fraction_threshold'] - + if self.do_constituent_subtraction: self.is_pp = False self.emb_file_list = config['emb_file_list'] self.main_R_max = config['constituent_subtractor']['main_R_max'] else: self.is_pp = True - + if 'thermal_model' in config: self.thermal_model = True beta = config['thermal_model']['beta'] @@ -112,7 +122,11 @@ def initialize_config(self): self.thermal_generator = thermal_generator.ThermalGenerator(N_avg, sigma_N, beta) else: self.thermal_model = False - + + # Whether or not to require jets to contain a track with some leading track pT + self.min_leading_track_pT = config["min_leading_track_pT"] if \ + "min_leading_track_pT" in config else None + # Create dictionaries to store grooming settings and observable settings for each observable # Each dictionary entry stores a list of subconfiguration parameters # The observable list stores the observable setting, e.g. 
subjetR @@ -120,15 +134,18 @@ def initialize_config(self): self.observable_list = config['process_observables'] self.obs_settings = {} self.obs_grooming_settings = {} + self.obs_names = {} for observable in self.observable_list: - + obs_config_dict = config[observable] obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] - + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] self.obs_settings[observable] = self.utils.obs_settings(observable, obs_config_dict, obs_subconfig_list) self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) - + + self.obs_names[observable] = obs_config_dict["common_settings"]["xtitle"] + # Construct set of unique grooming settings self.grooming_settings = [] lists_grooming = [self.obs_grooming_settings[obs] for obs in self.observable_list] @@ -136,16 +153,51 @@ def initialize_config(self): for setting in observable: if setting not in self.grooming_settings and setting != None: self.grooming_settings.append(setting) - + + # Flag for creating delta-observable histograms in Pb-Pb case. + # You can override this by setting the flag to True in your user class. 
+ # NOTE: requires implementation of self.calculate_observable() and creation + # of user histograms with this form: + # 'hDeltaObs_%s_emb_R%s_%s%s' % (observable, jetR, obs_label, suffix) + self.fill_delta_obs = False + + # If set to true, generates PYTHIA8 events to simulate the TTree data + self.pythia_test = True + if self.pythia_test: + self.user_seed = 152322855 + self.pTHatMin = 235 + self.pTHatMax = -1 + parser = argparse.ArgumentParser() + pyconf.add_standard_pythia_args(parser) + parser.add_argument('--user-seed', help='PYTHIA starting seed', default=1111, type=int) + parser.add_argument('-o', '--output-dir', action='store', type=str, default='./', + help='Output directory for generated ROOT file(s)') + parser.add_argument('-f', '--input_file', action='store', type=str, default='AnalysisResults.root', + help='Input ROOT file containing TTrees') + parser.add_argument('-c', '--config_file', action='store', type=str, default='config/angularity.yaml', + help="Path of config file for observable configurations") + args = parser.parse_args() + print(args) + mycfg = ['Beams:eCM=5020', 'Random:setSeed=on', 'Random:seed={}'.format(self.user_seed), + 'HardQCD:all=on', 'PhaseSpace:pTHatMin={}'.format(self.pTHatMin), + 'PhaseSpace:pTHatMax={}'.format(self.pTHatMax), "PhaseSpace:bias2Selection=off", + "111:mayDecay=on", "310:mayDecay=off", "3122:mayDecay=off", + "3112:mayDecay=off", "3222:mayDecay=off", "3312:mayDecay=off", + "3322:mayDecay=off", "3334:mayDecay=off"] + #mycfg.append('HadronLevel:all=off') + self.pythia = pyconf.create_and_init_pythia_from_args(args, mycfg) + #self.pythia.stat() + #print() + #--------------------------------------------------------------- # Main processing function #--------------------------------------------------------------- def process_mc(self): - + self.start_time = time.time() - + # ------------------------------------------------------------------------ - + # Use IO helper class to convert detector-level ROOT TTree into # a 
SeriesGroupBy object of fastjet particles per event print('--- {} seconds ---'.format(time.time() - self.start_time)) @@ -160,7 +212,7 @@ def process_mc(self): self.nEvents_det = len(df_fjparticles_det.index) self.nTracks_det = len(io_det.track_df.index) print('--- {} seconds ---'.format(time.time() - self.start_time)) - + # If jetscape, store also the negative status particles (holes) if self.jetscape: io_det_holes = process_io.ProcessIO(input_file=self.input_file, tree_dir=tree_dir, @@ -171,7 +223,7 @@ def process_mc(self): self.nEvents_det_holes = len(df_fjparticles_det_holes.index) self.nTracks_det_holes = len(io_det_holes.track_df.index) print('--- {} seconds ---'.format(time.time() - self.start_time)) - + # ------------------------------------------------------------------------ # Use IO helper class to convert truth-level ROOT TTree into @@ -183,7 +235,7 @@ def process_mc(self): self.nEvents_truth = len(df_fjparticles_truth.index) self.nTracks_truth = len(io_truth.track_df.index) print('--- {} seconds ---'.format(time.time() - self.start_time)) - + # If jetscape, store also the negative status particles (holes) if self.jetscape: io_truth_holes = process_io.ProcessIO(input_file=self.input_file, tree_dir=tree_dir, @@ -194,7 +246,25 @@ def process_mc(self): self.nEvents_truth_holes = len(df_fjparticles_truth_holes.index) self.nTracks_truth_holes = len(io_truth_holes.track_df.index) print('--- {} seconds ---'.format(time.time() - self.start_time)) - + + if self.pythia_test: + nev = len(df_fjparticles_truth) + print("Generate %i simulated PYTHIA8 events..." 
% nev) + events = [ pythiafjext.vectorize_select( + self.pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False, 0.1396) + for i in range(nev) if self.pythia.next() ] + run_number = [self.user_seed] * nev + ev_id = list(range(nev)) + tuples = list(zip(run_number, ev_id)) + index = pandas.MultiIndex.from_tuples(tuples, names=["run_number", "ev_id"]) + df_fjparticles_pythia = pandas.DataFrame({"fj_particle": events}, index=index) + + # For now, just set both det and truth to thsi pythia gen + print("before:\n", df_fjparticles_truth) + df_fjparticles_truth = df_fjparticles_pythia + df_fjparticles_det = df_fjparticles_pythia + print("after:\n", df_fjparticles_truth) + # ------------------------------------------------------------------------ # Now merge the two SeriesGroupBy to create a groupby df with [ev_id, run_number, fj_1, fj_2] @@ -210,47 +280,53 @@ def process_mc(self): print('--- {} seconds ---'.format(time.time() - self.start_time)) # ------------------------------------------------------------------------ - + # Set up the Pb-Pb embedding object if not self.is_pp and not self.thermal_model: self.process_io_emb = process_io_emb.ProcessIO_Emb(self.emb_file_list, track_tree_name='tree_Particle', m=self.m) - + # ------------------------------------------------------------------------ # Initialize histograms if not self.dry_run: self.initialize_output_objects() - + # Create constituent subtractor, if configured - if self.do_constituent_subtraction: - self.constituent_subtractor = [CEventSubtractor(max_distance=R_max, alpha=self.alpha, max_eta=self.max_eta, bge_rho_grid_size=self.bge_rho_grid_size, max_pt_correct=self.max_pt_correct, ghost_area=self.ghost_area, distance_type=fjcontrib.ConstituentSubtractor.deltaR) for R_max in self.max_distance] - + if not self.is_pp: + max_dist_li = self.max_distance if isinstance(self.max_distance, list) else \ + list(np.unique(np.concatenate(list(self.max_distance.values())))) + self.constituent_subtractor = { R_max : 
CEventSubtractor( + max_distance=R_max, alpha=self.alpha, max_eta=self.max_eta, + bge_rho_grid_size=self.bge_rho_grid_size, max_pt_correct=self.max_pt_correct, + ghost_area=self.ghost_area, distance_type=fjcontrib.ConstituentSubtractor.deltaR) \ + for R_max in max_dist_li} + print(self) - + # Find jets and fill histograms print('Find jets...') self.analyze_events() - + # Plot histograms print('Save histograms...') process_base.ProcessBase.save_output_objects(self) - + print('--- {} seconds ---'.format(time.time() - self.start_time)) #--------------------------------------------------------------- # Initialize histograms #--------------------------------------------------------------- def initialize_output_objects(self): - + self.hNevents = ROOT.TH1F('hNevents', 'hNevents', 2, -0.5, 1.5) self.hNevents.Fill(1, self.nEvents_det) - + self.hTrackEtaPhi = ROOT.TH2F('hTrackEtaPhi', 'hTrackEtaPhi', 200, -1., 1., 628, 0., 6.28) self.hTrackPt = ROOT.TH1F('hTrackPt', 'hTrackPt', 300, 0., 300.) - + if not self.is_pp: self.hRho = ROOT.TH1F('hRho', 'hRho', 1000, 0., 1000.) - + if not self.skip_deltapt_RC_histograms: name = 'hN_MeanPt' h = ROOT.TH2F(name, name, 200, 0, 5000, 200, 0., 2.) @@ -260,54 +336,58 @@ def initialize_output_objects(self): # Initialize histograms #--------------------------------------------------------------- def initialize_output_objects_R(self, jetR): - + # Call user-specific initialization self.initialize_user_output_objects_R(jetR) - + # Base histograms if self.is_pp: - + name = 'hJES_R{}'.format(jetR) h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 1.) setattr(self, name, h) - + name = 'hDeltaR_All_R{}'.format(jetR) h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0., 2.) 
setattr(self, name, h) - + else: - - for R_max in self.max_distance: - + + max_distance = self.max_distance if isinstance(self.max_distance, list) else \ + self.max_distance[jetR] + + for R_max in max_distance: + name = 'hJES_R{}_Rmax{}'.format(jetR, R_max) h = ROOT.TH2F(name, name, 300, 0, 300, 200, -1., 1.) setattr(self, name, h) - + name = 'hDeltaPt_emb_R{}_Rmax{}'.format(jetR, R_max) h = ROOT.TH2F(name, name, 300, 0, 300, 400, -200., 200.) setattr(self, name, h) - + + # Random cone histograms if not self.skip_deltapt_RC_histograms: name = 'hDeltaPt_RC_beforeCS_R{}_Rmax{}'.format(jetR, R_max) h = ROOT.TH1F(name, name, 400, -200., 200.) setattr(self, name, h) - + name = 'hDeltaPt_RC_afterCS_R{}_Rmax{}'.format(jetR, R_max) h = ROOT.TH1F(name, name, 400, -200., 200.) setattr(self, name, h) - + name = 'hDeltaR_ppdet_pptrue_R{}_Rmax{}'.format(jetR, R_max) h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0., 2.) setattr(self, name, h) - + name = 'hDeltaR_combined_ppdet_R{}_Rmax{}'.format(jetR, R_max) h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0., 2.) setattr(self, name, h) - + name = 'hZ_Truth_R{}'.format(jetR) h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0., 1.) setattr(self, name, h) - + name = 'hZ_Det_R{}'.format(jetR) h = ROOT.TH2F(name, name, 300, 0, 300, 100, 0., 1.) 
setattr(self, name, h) @@ -316,35 +396,42 @@ def initialize_output_objects_R(self, jetR): # Main function to loop through and analyze events #--------------------------------------------------------------- def analyze_events(self): - + # Fill track histograms if not self.dry_run: - [self.fill_track_histograms(fj_particles_det) for fj_particles_det in self.df_fjparticles['fj_particles_det']] - + for fj_particles_det in self.df_fjparticles['fj_particles_det']: + self.fill_track_histograms(fj_particles_det) + fj.ClusterSequence.print_banner() print() - + self.event_number = 0 - + for jetR in self.jetR_list: if not self.dry_run: self.initialize_output_objects_R(jetR) - + # Then can use list comprehension to iterate over the groupby and do jet-finding # simultaneously for fj_1 and fj_2 per event, so that I can match jets -- and fill histograms if self.jetscape: - result = [self.analyze_event(fj_particles_det, fj_particles_truth, fj_particles_det_holes, fj_particles_truth_holes) for fj_particles_det, fj_particles_truth, fj_particles_det_holes, fj_particles_truth_holes in zip(self.df_fjparticles['fj_particles_det'], self.df_fjparticles['fj_particles_truth'], self.df_fjparticles['fj_particles_det_holes'], self.df_fjparticles['fj_particles_truth_holes'])] + result = [self.analyze_event( + fj_particles_det, fj_particles_truth, fj_particles_det_holes, fj_particles_truth_holes) \ + for fj_particles_det, fj_particles_truth, fj_particles_det_holes, fj_particles_truth_holes \ + in zip(self.df_fjparticles['fj_particles_det'], self.df_fjparticles['fj_particles_truth'], + self.df_fjparticles['fj_particles_det_holes'], self.df_fjparticles['fj_particles_truth_holes'])] else: - result = [self.analyze_event(fj_particles_det, fj_particles_truth) for fj_particles_det, fj_particles_truth in zip(self.df_fjparticles['fj_particles_det'], self.df_fjparticles['fj_particles_truth'])] - + for fj_particles_det, fj_particles_truth in zip( + self.df_fjparticles['fj_particles_det'], 
self.df_fjparticles['fj_particles_truth']): + self.analyze_event(fj_particles_det, fj_particles_truth) + if self.debug_level > 0: for attr in dir(self): obj = getattr(self, attr) print('size of {}: {}'.format(attr, sys.getsizeof(obj))) - + print('Save thn...') process_base.ProcessBase.save_thn_th3_objects(self) - + #--------------------------------------------------------------- # Fill track histograms. #--------------------------------------------------------------- @@ -353,25 +440,27 @@ def fill_track_histograms(self, fj_particles_det): # Check that the entries exist appropriately # (need to check how this can happen -- but it is only a tiny fraction of events) if type(fj_particles_det) != fj.vectorPJ: + print("ERROR: fj_particles_det of type %s; expected %s" % (type(fj_particles_det), fj.vectorPJ)) return - + for track in fj_particles_det: self.hTrackEtaPhi.Fill(track.eta(), track.phi()) self.hTrackPt.Fill(track.pt()) - + + #--------------------------------------------------------------- # Analyze jets of a given event. # fj_particles is the list of fastjet pseudojets for a single fixed event. 
#--------------------------------------------------------------- def analyze_event(self, fj_particles_det, fj_particles_truth, fj_particles_det_holes=None, fj_particles_truth_holes=None): - + self.event_number += 1 if self.event_number > self.event_number_max: return if self.debug_level > 1: print('-------------------------------------------------') print('event {}'.format(self.event_number)) - + # Check that the entries exist appropriately # (need to check how this can happen -- but it is only a tiny fraction of events) if type(fj_particles_det) != fj.vectorPJ or type(fj_particles_truth) != fj.vectorPJ: @@ -381,39 +470,42 @@ def analyze_event(self, fj_particles_det, fj_particles_truth, fj_particles_det_h if type(fj_particles_det_holes) != fj.vectorPJ or type(fj_particles_truth_holes) != fj.vectorPJ: print('fj_particles_holes type mismatch -- skipping event') return - + if len(fj_particles_truth) > 1: if np.abs(fj_particles_truth[0].pt() - fj_particles_truth[1].pt()) < 1e-10: print('WARNING: Duplicate particles may be present') print([p.user_index() for p in fj_particles_truth]) print([p.pt() for p in fj_particles_truth]) - + # If Pb-Pb, construct embedded event (do this once, for all jetR) if not self.is_pp: - + # If thermal model, generate a thermal event and add it to the det-level particle list if self.thermal_model: fj_particles_combined_beforeCS = self.thermal_generator.load_event() - + # Form the combined det-level event # The pp-det tracks are each stored with a unique user_index >= 0 # (same index in fj_particles_combined and fj_particles_det -- which will be used in prong-matching) # The thermal tracks are each stored with a unique user_index < 0 - [fj_particles_combined_beforeCS.push_back(p) for p in fj_particles_det] + for p in fj_particles_det: + fj_particles_combined_beforeCS.push_back(p) # Main case: Get Pb-Pb event and embed it into the det-level particle list else: fj_particles_combined_beforeCS = self.process_io_emb.load_event() - + # Form the 
combined det-level event # The pp-det tracks are each stored with a unique user_index >= 0 # (same index in fj_particles_combined and fj_particles_det -- which will be used in prong-matching) # The Pb-Pb tracks are each stored with a unique user_index < 0 - [fj_particles_combined_beforeCS.push_back(p) for p in fj_particles_det] - + for p in fj_particles_det: + fj_particles_combined_beforeCS.push_back(p) + # Perform constituent subtraction for each R_max - fj_particles_combined = [self.constituent_subtractor[i].process_event(fj_particles_combined_beforeCS) for i, R_max in enumerate(self.max_distance)] - + fj_particles_combined = { R_max : cs.process_event(fj_particles_combined_beforeCS) for \ + R_max, cs in self.constituent_subtractor.items() } + if self.debug_level > 3: print([p.user_index() for p in fj_particles_truth]) print([p.pt() for p in fj_particles_truth]) @@ -421,13 +513,13 @@ def analyze_event(self, fj_particles_det, fj_particles_truth, fj_particles_det_h print([p.pt() for p in fj_particles_det]) print([p.user_index() for p in fj_particles_combined_beforeCS]) print([p.pt() for p in fj_particles_combined_beforeCS]) - + if self.dry_run: return # Loop through jetR, and process event for each R for jetR in self.jetR_list: - + # Keep track of whether to fill R-independent histograms self.fill_R_indep_hists = (jetR == self.jetR_list[0]) @@ -439,50 +531,55 @@ def analyze_event(self, fj_particles_det, fj_particles_truth, fj_particles_det_h print('') print('jet definition is:', jet_def) print('jet selector for det-level is:', jet_selector_det) + if self.min_leading_track_pT: + print('*** Requiring minimum leading track pT:', self.min_leading_track_pT) print('jet selector for truth-level matches is:', jet_selector_truth_matched) - + # Analyze if self.is_pp: - + # Find pp det and truth jets cs_det = fj.ClusterSequence(fj_particles_det, jet_def) jets_det_pp = fj.sorted_by_pt(cs_det.inclusive_jets()) jets_det_pp_selected = jet_selector_det(jets_det_pp) - + cs_truth = 
fj.ClusterSequence(fj_particles_truth, jet_def) jets_truth = fj.sorted_by_pt(cs_truth.inclusive_jets()) jets_truth_selected = jet_selector_det(jets_truth) jets_truth_selected_matched = jet_selector_truth_matched(jets_truth) - + self.analyze_jets(jets_det_pp_selected, jets_truth_selected, jets_truth_selected_matched, jetR) - + else: - - for i, R_max in enumerate(self.max_distance): - + + max_distance = self.max_distance if isinstance(self.max_distance, list) else \ + self.max_distance[jetR] + + # Keep track of whether to fill R_max-independent histograms + self.fill_Rmax_indep_hists = True + + for R_max in max_distance: + if self.debug_level > 1: print('') print('R_max: {}'.format(R_max)) - print('Total number of combined particles: {}'.format(len([p.pt() for p in fj_particles_combined_beforeCS]))) - print('After constituent subtraction {}: {}'.format(i, len([p.pt() for p in fj_particles_combined[i]]))) - - # Keep track of whether to fill R_max-independent histograms - self.fill_Rmax_indep_hists = (i == 0) - + print('Total number of combined particles:', len([p.pt() for p in fj_particles_combined_beforeCS])) + print('After constituent subtraction:', len([p.pt() for p in fj_particles_combined[R_max]])) + # Perform constituent subtraction on det-level, if applicable - self.fill_background_histograms(fj_particles_combined_beforeCS, fj_particles_combined[i], jetR, i) - + self.fill_background_histograms(fj_particles_combined_beforeCS, fj_particles_combined[R_max], jetR, R_max) + # Do jet finding (re-do each time, to make sure matching info gets reset) cs_det = fj.ClusterSequence(fj_particles_det, jet_def) jets_det_pp = fj.sorted_by_pt(cs_det.inclusive_jets()) jets_det_pp_selected = jet_selector_det(jets_det_pp) - + cs_truth = fj.ClusterSequence(fj_particles_truth, jet_def) jets_truth = fj.sorted_by_pt(cs_truth.inclusive_jets()) jets_truth_selected = jet_selector_det(jets_truth) jets_truth_selected_matched = jet_selector_truth_matched(jets_truth) - - cs_combined = 
fj.ClusterSequence(fj_particles_combined[i], jet_def) + + cs_combined = fj.ClusterSequence(fj_particles_combined[R_max], jet_def) jets_combined = fj.sorted_by_pt(cs_combined.inclusive_jets()) jets_combined_selected = jet_selector_det(jets_combined) @@ -491,82 +588,118 @@ def analyze_event(self, fj_particles_det, fj_particles_truth, fj_particles_det_h fj_particles_det_holes = fj_particles_det_holes, fj_particles_truth_holes = fj_particles_truth_holes) + # Don't fill R_max-independent histograms in future loops + self.fill_Rmax_indep_hists = False + #--------------------------------------------------------------- # Analyze jets of a given event. #--------------------------------------------------------------- def analyze_jets(self, jets_det_selected, jets_truth_selected, jets_truth_selected_matched, jetR, jets_det_pp_selected = None, R_max = None, fj_particles_det_holes = None, fj_particles_truth_holes = None): - + if self.debug_level > 1: print('Number of det-level jets: {}'.format(len(jets_det_selected))) - + # Fill det-level jet histograms (before matching) for jet_det in jets_det_selected: - + # Check additional acceptance criteria - # skip event if not satisfied -- since first jet in event is highest pt - if not self.utils.is_det_jet_accepted(jet_det): - if self.fill_R_indep_hists: - self.hNevents.Fill(0) + if not self.utils.is_det_jet_accepted(jet_det, self.min_leading_track_pT): if self.debug_level > 1: - print('event rejected due to jet acceptance') - return - + text = 'det jet rejected due to acceptance' + if self.min_leading_track_pT: + text += ' or leading track pT cut' + print(text) + continue + self.fill_det_before_matching(jet_det, jetR, R_max) - + # Fill truth-level jet histograms (before matching) for jet_truth in jets_truth_selected: - + + # Check additional acceptance criteria + if not self.utils.is_truth_jet_accepted(jet_truth, self.min_leading_track_pT): + if self.debug_level > 1: + print('truth jet rejected due to leading track pT cut') + 
continue + if self.is_pp or self.fill_Rmax_indep_hists: self.fill_truth_before_matching(jet_truth, jetR) - + # Loop through jets and set jet matching candidates for each jet in user_info if self.is_pp: - [[self.set_matching_candidates(jet_det, jet_truth, jetR, 'hDeltaR_All_R{}'.format(jetR)) for jet_truth in jets_truth_selected_matched] for jet_det in jets_det_selected] - else: + for jet_det in jets_det_selected: + if not self.utils.is_det_jet_accepted(jet_det, self.min_leading_track_pT): + continue + + for jet_truth in jets_truth_selected_matched: + if not self.utils.is_truth_jet_accepted(jet_truth, self.min_leading_track_pT): + continue + + self.set_matching_candidates(jet_det, jet_truth, jetR, 'hDeltaR_All_R{}'.format(jetR)) + + else: # Pb-Pb + for jet_det_pp in jets_det_pp_selected: + if not self.utils.is_det_jet_accepted(jet_det_pp, self.min_leading_track_pT): + continue + # First fill the combined-to-pp matches, then the pp-to-pp matches - [[self.set_matching_candidates(jet_det_combined, jet_det_pp, jetR, 'hDeltaR_combined_ppdet_R{{}}_Rmax{}'.format(R_max), fill_jet1_matches_only=True) for jet_det_pp in jets_det_pp_selected] for jet_det_combined in jets_det_selected] - [[self.set_matching_candidates(jet_det_pp, jet_truth, jetR, 'hDeltaR_ppdet_pptrue_R{{}}_Rmax{}'.format(R_max)) for jet_truth in jets_truth_selected_matched] for jet_det_pp in jets_det_pp_selected] - + for jet_det_combined in jets_det_selected: + if not self.utils.is_det_jet_accepted(jet_det_combined, self.min_leading_track_pT): + continue + + self.set_matching_candidates(jet_det_combined, jet_det_pp, jetR, + 'hDeltaR_combined_ppdet_R{{}}_Rmax{}'.format(R_max), fill_jet1_matches_only=True) + + for jet_truth in jets_truth_selected_matched: + if not self.utils.is_truth_jet_accepted(jet_truth, self.min_leading_track_pT): + continue + + self.set_matching_candidates( + jet_det_pp, jet_truth, jetR, 'hDeltaR_ppdet_pptrue_R{{}}_Rmax{}'.format(R_max)) + # Loop through jets and set accepted matches if 
self.is_pp: - hname = 'hJetMatchingQA_R{}'.format(jetR) - [self.set_matches_pp(jet_det, hname) for jet_det in jets_det_selected] + hname = 'hJetMatchingQA_R{}'.format(jetR) + for jet_det in jets_det_selected: + self.set_matches_pp(jet_det, hname) else: - hname = 'hJetMatchingQA_R{}_Rmax{}'.format(jetR, R_max) - [self.set_matches_AA(jet_det_combined, jetR, hname) for jet_det_combined in jets_det_selected] - + hname = 'hJetMatchingQA_R{}_Rmax{}'.format(jetR, R_max) + for jet_det_combined in jets_det_selected: + self.set_matches_AA(jet_det_combined, jetR, hname) + # Loop through jets and fill response histograms if both det and truth jets are unique match - result = [self.fill_jet_matches(jet_det, jetR, R_max, fj_particles_det_holes, fj_particles_truth_holes) for jet_det in jets_det_selected] + for jet_det in jets_det_selected: + self.fill_jet_matches( + jet_det, jetR, R_max, fj_particles_det_holes, fj_particles_truth_holes) #--------------------------------------------------------------- # Fill some background histograms #--------------------------------------------------------------- - def fill_background_histograms(self, fj_particles_combined_beforeCS, fj_particles_combined, jetR, i): + def fill_background_histograms(self, fj_particles_combined_beforeCS, fj_particles_combined, jetR, R_max): # Fill rho - rho = self.constituent_subtractor[i].bge_rho.rho() + rho = self.constituent_subtractor[R_max].bge_rho.rho() if self.fill_R_indep_hists and self.fill_Rmax_indep_hists: getattr(self, 'hRho').Fill(rho) - + # Fill random cone delta-pt before constituent subtraction if not self.skip_deltapt_RC_histograms: - R_max = self.max_distance[i] self.fill_deltapt_RC_histogram(fj_particles_combined_beforeCS, rho, jetR, R_max, before_CS=True) - + # Fill random cone delta-pt after constituent subtraction self.fill_deltapt_RC_histogram(fj_particles_combined, rho, jetR, R_max, before_CS=False) - + #--------------------------------------------------------------- # Fill delta-pt 
histogram #--------------------------------------------------------------- def fill_deltapt_RC_histogram(self, fj_particles, rho, jetR, R_max, before_CS=False): - + # Choose a random eta-phi in the fiducial acceptance phi = random.uniform(0., 2*np.pi) eta = random.uniform(-0.9+jetR, 0.9-jetR) - + # Loop through tracks and sum pt inside the cone pt_sum = 0. pt_sum_global = 0. @@ -574,14 +707,14 @@ def fill_deltapt_RC_histogram(self, fj_particles, rho, jetR, R_max, before_CS=Fa if self.utils.delta_R(track, eta, phi) < jetR: pt_sum += track.pt() pt_sum_global += track.pt() - + if before_CS: delta_pt = pt_sum - rho * np.pi * jetR * jetR getattr(self, 'hDeltaPt_RC_beforeCS_R{}_Rmax{}'.format(jetR, R_max)).Fill(delta_pt) else: delta_pt = pt_sum getattr(self, 'hDeltaPt_RC_afterCS_R{}_Rmax{}'.format(jetR, R_max)).Fill(delta_pt) - + # Fill mean pt if before_CS and self.fill_R_indep_hists and self.fill_Rmax_indep_hists: N_tracks = len(fj_particles) @@ -592,12 +725,12 @@ def fill_deltapt_RC_histogram(self, fj_particles, rho, jetR, R_max, before_CS=Fa # Fill truth jet histograms #--------------------------------------------------------------- def fill_truth_before_matching(self, jet, jetR): - + jet_pt = jet.pt() for constituent in jet.constituents(): z = constituent.pt() / jet.pt() getattr(self, 'hZ_Truth_R{}'.format(jetR)).Fill(jet.pt(), z) - + # Fill 2D histogram of truth (pt, obs) hname = 'h_{{}}_JetPt_Truth_R{}_{{}}'.format(jetR) self.fill_unmatched_jet_histograms(jet, jetR, hname) @@ -606,132 +739,160 @@ def fill_truth_before_matching(self, jet, jetR): # Fill det jet histograms #--------------------------------------------------------------- def fill_det_before_matching(self, jet, jetR, R_max): - + if self.is_pp or self.fill_Rmax_indep_hists: jet_pt = jet.pt() for constituent in jet.constituents(): z = constituent.pt() / jet_pt getattr(self, 'hZ_Det_R{}'.format(jetR)).Fill(jet_pt, z) - + # Fill groomed histograms if self.thermal_model: hname = 
'h_{{}}_JetPt_R{}_{{}}_Rmax{}'.format(jetR, R_max) self.fill_unmatched_jet_histograms(jet, jetR, hname) - + #--------------------------------------------------------------- # This function is called once for each jet #--------------------------------------------------------------- def fill_unmatched_jet_histograms(self, jet, jetR, hname): # Loop through each jet subconfiguration (i.e. subobservable / grooming setting) - observable = self.observable_list[0] - for i in range(len(self.obs_settings[observable])): - - obs_setting = self.obs_settings[observable][i] - grooming_setting = self.obs_grooming_settings[observable][i] - obs_label = self.utils.obs_label(obs_setting, grooming_setting) - - # Groom jet, if applicable - if grooming_setting: - gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) - jet_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) - if not jet_groomed_lund: - continue - else: - jet_groomed_lund = None - - # Call user function to fill histograms - self.fill_observable_histograms(hname, jet, jet_groomed_lund, jetR, obs_setting, - grooming_setting, obs_label, jet.pt()) - + for observable in self.observable_list: + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + # Groom jet, if applicable + if grooming_setting: + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) + if not jet_groomed_lund: + continue + else: + jet_groomed_lund = None + + # Call user function to fill histograms + self.fill_observable_histograms(observable, hname, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet.pt()) + #--------------------------------------------------------------- # Loop through jets and call user function to fill matched # histos if both 
det and truth jets are unique match. #--------------------------------------------------------------- def fill_jet_matches(self, jet_det, jetR, R_max, fj_particles_det_holes, fj_particles_truth_holes): - + # Set suffix for filling histograms if R_max: - suffix = '_Rmax{}'.format(R_max) + suffix = '_Rmax' + str(R_max) else: suffix = '' - + # Get matched truth jet if jet_det.has_user_info(): jet_truth = jet_det.python_info().match - + if jet_truth: - + jet_pt_det_ungroomed = jet_det.pt() jet_pt_truth_ungroomed = jet_truth.pt() JES = (jet_pt_det_ungroomed - jet_pt_truth_ungroomed) / jet_pt_truth_ungroomed - getattr(self, 'hJES_R{}{}'.format(jetR, suffix)).Fill(jet_pt_truth_ungroomed, JES) - + getattr(self, 'hJES_R' + str(jetR) + suffix).Fill(jet_pt_truth_ungroomed, JES) + # If Pb-Pb case, we need to keep jet_det, jet_truth, jet_pp_det jet_pp_det = None if not self.is_pp: - + # Get pp-det jet jet_pp_det = jet_truth.python_info().match - + # Fill delta-pt histogram if jet_pp_det: - jet_pp_det_pt = jet_pp_det.pt() - delta_pt = (jet_pt_det_ungroomed - jet_pp_det_pt) - getattr(self, 'hDeltaPt_emb_R{}_Rmax{}'.format(jetR, R_max)).Fill(jet_pt_truth_ungroomed, delta_pt) - + getattr(self, 'hDeltaPt_emb_R' + str(jetR) + suffix).Fill( + jet_pt_truth_ungroomed, jet_pt_det_ungroomed - jet_pp_det.pt()) + # Loop through each jet subconfiguration (i.e. 
subobservable / grooming setting) - observable = self.observable_list[0] - for i in range(len(self.obs_settings[observable])): - - obs_setting = self.obs_settings[observable][i] - grooming_setting = self.obs_grooming_settings[observable][i] - obs_label = self.utils.obs_label(obs_setting, grooming_setting) - - if self.debug_level > 3: - print('obs_label: {}'.format(obs_label)) - - # Groom jets, if applicable - if grooming_setting: - - # Groom det jet - gshop_det = fjcontrib.GroomerShop(jet_det, jetR, self.reclustering_algorithm) - jet_det_groomed_lund = self.utils.groom(gshop_det, grooming_setting, jetR) - if not jet_det_groomed_lund: - continue - - # Groom truth jet - gshop_truth = fjcontrib.GroomerShop(jet_truth, jetR, self.reclustering_algorithm) - jet_truth_groomed_lund = self.utils.groom(gshop_truth, grooming_setting, jetR) - if not jet_truth_groomed_lund: - continue - - else: - - jet_det_groomed_lund = None - jet_truth_groomed_lund = None - - # If jetscape, pass the list of holes within R of the jet to the user - holes_in_det_jet = None - holes_in_truth_jet = None - if self.jetscape: - holes_in_det_jet = [hadron for hadron in fj_particles_det_holes if jet_det.delta_R(hadron) < jetR] - holes_in_truth_jet = [hadron for hadron in fj_particles_truth_holes if jet_truth.delta_R(hadron) < jetR] - - # Get the corrected jet pt by subtracting the negative recoils within R - for hadron in holes_in_det_jet: - jet_pt_det_ungroomed -= hadron.pt() - - for hadron in holes_in_truth_jet: - jet_pt_truth_ungroomed -= hadron.pt() - - # Call user function to fill histos - self.fill_matched_jet_histograms(jet_det, jet_det_groomed_lund, jet_truth, - jet_truth_groomed_lund, jet_pp_det, jetR, - obs_setting, grooming_setting, obs_label, - jet_pt_det_ungroomed, jet_pt_truth_ungroomed, - R_max, suffix, holes_in_det_jet=holes_in_det_jet, - holes_in_truth_jet=holes_in_truth_jet) + for observable in self.observable_list: + for i in range(len(self.obs_settings[observable])): + + obs_setting = 
self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + if self.debug_level > 3: + print('obs_label:', obs_label) + + # Groom jets, if applicable + if grooming_setting: + + # Groom det jet + gshop_det = fjcontrib.GroomerShop(jet_det, jetR, self.reclustering_algorithm) + jet_det_groomed_lund = self.utils.groom(gshop_det, grooming_setting, jetR) + if not jet_det_groomed_lund: + continue + + # Groom truth jet + gshop_truth = fjcontrib.GroomerShop(jet_truth, jetR, self.reclustering_algorithm) + jet_truth_groomed_lund = self.utils.groom(gshop_truth, grooming_setting, jetR) + if not jet_truth_groomed_lund: + continue + + else: + + jet_det_groomed_lund = None + jet_truth_groomed_lund = None + + # If jetscape, pass the list of holes within R of the jet to the user + holes_in_det_jet = None + holes_in_truth_jet = None + if self.jetscape: + holes_in_det_jet = [hadron for hadron in fj_particles_det_holes if \ + jet_det.delta_R(hadron) < jetR] + holes_in_truth_jet = [hadron for hadron in fj_particles_truth_holes if \ + jet_truth.delta_R(hadron) < jetR] + + # Get the corrected jet pt by subtracting the negative recoils within R + for hadron in holes_in_det_jet: + jet_pt_det_ungroomed -= hadron.pt() + + for hadron in holes_in_truth_jet: + jet_pt_truth_ungroomed -= hadron.pt() + + # Fill delta-observable histograms + if self.fill_delta_obs and not self.is_pp: + + # Get pp-det jet + jet_pp_det = jet_truth.python_info().match + if jet_pp_det: + + # Groom jet, if applicable + jet_pp_det_groomed_lund = None + if grooming_setting: + gshop_pp_det = fjcontrib.GroomerShop( + jet_pp_det, jetR, self.reclustering_algorithm) + jet_pp_det_groomed_lund = self.utils.groom( + gshop_pp_det, grooming_setting, jetR) + if not jet_pp_det_groomed_lund: + continue + + obs_det = self.calculate_observable(observable, jet_det, + jet_det_groomed_lund, jetR, obs_setting, grooming_setting, + 
obs_label, jet_pt_det_ungroomed) + + obs_pp_det = self.calculate_observable(observable, jet_pp_det, + jet_pp_det_groomed_lund, jetR, obs_setting, grooming_setting, + obs_label, jet_pp_det.pt()) + + getattr(self, 'hDeltaObs_%s_emb_R%s_%s%s' % (observable, jetR, + obs_label, suffix)).Fill(jet_pt_truth_ungroomed, obs_det - obs_pp_det) + + # Call user function to fill histos + self.fill_matched_jet_histograms(observable, jet_det, jet_det_groomed_lund, + jet_truth, jet_truth_groomed_lund, jet_pp_det, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + R_max, suffix, holes_in_det_jet=holes_in_det_jet, + holes_in_truth_jet=holes_in_truth_jet) #--------------------------------------------------------------- # Fill response histograms -- common utility function @@ -742,43 +903,59 @@ def fill_response(self, observable, jetR, jet_pt_det_ungroomed, jet_pt_truth_ung if self.fill_RM_histograms: x = ([jet_pt_det_ungroomed, jet_pt_truth_ungroomed, obs_det, obs_truth]) x_array = array('d', x) - name = 'hResponse_JetPt_{}_R{}_{}'.format(observable, jetR, obs_label) + name = 'hResponse_JetPt_{}_R{}_{}'.format(observable, jetR, obs_label) if \ + len(obs_label) else 'hResponse_JetPt_{}_R{}'.format(observable, jetR) if not self.is_pp: name += '_Rmax{}'.format(R_max) getattr(self, name).Fill(x_array) - + if obs_truth > 1e-5: obs_resolution = (obs_det - obs_truth) / obs_truth - name = 'hResidual_JetPt_{}_R{}_{}'.format(observable, jetR, obs_label) + name = 'hResidual_JetPt_{}_R{}_{}'.format(observable, jetR, obs_label) if \ + len(obs_label) else 'hResidual_JetPt_{}_R{}'.format(observable, jetR) if not self.is_pp: name += '_Rmax{}'.format(R_max) getattr(self, name).Fill(jet_pt_truth_ungroomed, obs_truth, obs_resolution) - + # Fill prong-matched response - if not self.is_pp and R_max == self.main_R_max: - if prong_match: - - name = 'hResponse_JetPt_{}_R{}_{}_Rmax{}_matched'.format(observable, jetR, obs_label, R_max) + if not self.is_pp: + 
main_R_max = self.main_R_max if isinstance(self.main_R_max, float) else self.main_R_max[jetR] + if prong_match and R_max == main_R_max: + + name = 'hResponse_JetPt_{}_R{}_{}_Rmax{}_matched'.format( + observable, jetR, obs_label, R_max) if len(obs_label) else \ + 'hResponse_JetPt_{}_R{}_Rmax{}_matched'.format(observable, jetR, R_max) getattr(self, name).Fill(x_array) - + if obs_truth > 1e-5: - name = 'hResidual_JetPt_{}_R{}_{}_Rmax{}_matched'.format(observable, jetR, obs_label, R_max) + name = 'hResidual_JetPt_{}_R{}_{}_Rmax{}_matched'.format( + observable, jetR, obs_label, R_max) if len(obs_label) else \ + 'hResidual_JetPt_{}_R{}_Rmax{}_matched'.format(observable, jetR, R_max) getattr(self, name).Fill(jet_pt_truth_ungroomed, obs_truth, obs_resolution) + + #--------------------------------------------------------------- + # Calculate the observable given a jet + #--------------------------------------------------------------- + def calculate_observable(self, observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + raise NotImplementedError('You must implement calculate_observable()!') + #--------------------------------------------------------------- # This function is called once for each jetR # You must implement this #--------------------------------------------------------------- def initialize_user_output_objects_R(self, jetR): - + raise NotImplementedError('You must implement initialize_user_output_objects_R()!') #--------------------------------------------------------------- # This function is called once for each jet subconfiguration # You must implement this #--------------------------------------------------------------- - def fill_observable_histograms(self, hname, jet, jet_groomed_lund, jetR, obs_setting, - grooming_setting, obs_label, jet_pt_ungroomed): + def fill_observable_histograms(self, observable, hname, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): 
raise NotImplementedError('You must implement fill_observable_histograms()!') @@ -786,11 +963,18 @@ def fill_observable_histograms(self, hname, jet, jet_groomed_lund, jetR, obs_set # This function is called once for each matched jet subconfiguration # You must implement this #--------------------------------------------------------------- - def fill_matched_jet_histograms(self, jet_det, jet_det_groomed_lund, jet_truth, - jet_truth_groomed_lund, jet_pp_det, jetR, - obs_setting, grooming_setting, obs_label, - jet_pt_det_ungroomed, jet_pt_truth_ungroomed, - R_max, suffix, - **kwargs): + def fill_matched_jet_histograms(self, observable, jet_det, jet_det_groomed_lund, + jet_truth, jet_truth_groomed_lund, jet_pp_det, jetR, obs_setting, + grooming_setting, obs_label, jet_pt_det_ungroomed, jet_pt_truth_ungroomed, + R_max, suffix, **kwargs): raise NotImplementedError('You must implement fill_matched_jet_histograms()!') + + #--------------------------------------------------------------- + # Optional testing function for embedding + # Useful for background subtraction studies + #--------------------------------------------------------------- + def fill_jet_embedding_test_histograms(self): + + # Override default functionality in user code, if desired + return diff --git a/pyjetty/alice_analysis/process/user/substructure/process_parton_hadron_base.py b/pyjetty/alice_analysis/process/user/substructure/process_parton_hadron_base.py index 22918524d..206f20d6f 100755 --- a/pyjetty/alice_analysis/process/user/substructure/process_parton_hadron_base.py +++ b/pyjetty/alice_analysis/process/user/substructure/process_parton_hadron_base.py @@ -65,31 +65,31 @@ class ProcessPHBase(process_base.ProcessBase): # Constructor #--------------------------------------------------------------- def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, **kwargs): - + # Initialize base class super(ProcessPHBase, self).__init__(input_file, config_file, output_dir, debug_level, 
**kwargs) - + # Initialize configuration self.initialize_config() - + #--------------------------------------------------------------- # Initialize config file into class members #--------------------------------------------------------------- def initialize_config(self): - + # Call base class initialization process_base.ProcessBase.initialize_config(self) # C++ histogram rebinning functions - # Don't actually need any of these, but have to init to get other - # RUtil functions for some reason... + # Don't actually need any of these, but have to init to get other + # RUtil functions for some reason... self.histutils = ROOT.RUtil.HistUtils() - + # Read config file config = None with open(self.config_file, 'r') as stream: config = yaml.safe_load(stream) - + self.jet_matching_distance = config['jet_matching_distance'] # Load levels desired for the various RMs. @@ -112,15 +112,15 @@ def initialize_config(self): self.obs_settings = {} self.obs_grooming_settings = {} for observable in self.observable_list: - + obs_config_dict = config[observable] obs_config_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] - + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] self.obs_settings[observable] = self.utils.obs_settings( observable, obs_config_dict, obs_subconfig_list) self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) - + # Construct set of unique grooming settings self.grooming_settings = [] lists_grooming = [self.obs_grooming_settings[obs] for obs in self.observable_list] @@ -130,7 +130,7 @@ def initialize_config(self): self.grooming_settings.append(setting) # These values set the step size and control memory usage - self.track_df_step_size = 1e5 + self.track_df_step_size = 1e5 self.max_ev_storage = 5e4 fj.ClusterSequence.print_banner() @@ -139,16 +139,16 @@ def initialize_config(self): # Initialize empty storage dictionaries to store events for use in next iteration 
#--------------------------------------------------------------- def init_storage(self, level, MPI): - setattr(self, "df_fjparticles_storage_%s_MPI%s" % (level, MPI), + setattr(self, "df_fjparticles_storage_%s_MPI%s" % (level, MPI), { "run_number": [], "ev_id": [], "fj_particle": [] }) setattr(self, "run_numbers_storage_%s_MPI%s" % (level, MPI), []) setattr(self, "unique_ev_ids_per_run_storage_%s_MPI%s" % (level, MPI), []) - + #--------------------------------------------------------------- # Main processing function #--------------------------------------------------------------- def process_mc(self): - + self.start_time = time.time() # ------------------------------------------------------------------------ @@ -190,7 +190,7 @@ def process_mc(self): len(getattr(self, "df_fjparticles_storage_h_MPI"+MPI)["run_number"])) print("Load TTrees with MPI %s, iteration %i/%i (p) | %i/%i (h)..." % \ - (MPI, getattr(self, "df_iter_p_MPI"+MPI), max_iter_p, + (MPI, getattr(self, "df_iter_p_MPI"+MPI), max_iter_p, getattr(self, "df_iter_h_MPI"+MPI), max_iter_h)) self.load_trees_to_dict(MPI) #print('--- {} seconds ---'.format(time.time() - self.start_time)) @@ -202,7 +202,7 @@ def process_mc(self): self.analyze_events(MPI) print('\n--- {} seconds ---'.format(time.time() - self.start_time)) - + # ------------------------------------------------------------------------ print("Scale histograms by appropriate weighting...") @@ -214,14 +214,14 @@ def process_mc(self): # Plot histograms print('Save histograms...') process_base.ProcessBase.save_output_objects(self) - + print('--- {} seconds ---'.format(time.time() - self.start_time)) #--------------------------------------------------------------- # Intialize track tree readers and save sizes for proper looping #--------------------------------------------------------------- def init_tree_readers(self, MPI): - setattr(self, "io_p_MPI"+MPI, + setattr(self, "io_p_MPI"+MPI, process_io_parton_hadron.ProcessIO(input_file=self.input_file, 
level='p', MPI=MPI)) setattr(self, "tree_len_p_MPI"+MPI, getattr(self, "io_p_MPI"+MPI).tree_length) @@ -348,10 +348,31 @@ def append_df_fjparticles(self, level, MPI, df_to_append): if "fj" in key: getattr(self, name)[key][-1] += df_to_append[key][0] df_to_append[key].pop(0) + + print("old ev tree") + self.print_df(getattr(self, name)) + print("appending") + self.print_df(df_to_append) + # Merge dfs for key, val in df_to_append.items(): getattr(self, name)[key] += val + + #--------------------------------------------------------------- + # Function to look inside df in convenient way + #--------------------------------------------------------------- + def print_df(self, df): + if len(df["run_number"]) < 1: + print("empty df") + print("run_number", "ev_id", "pt\t\t", "eta\t\t", "phi", sep='\t') + for i, run_number in enumerate(df["run_number"]): + ev_id = df["ev_id"][i] + for j, fj_particle in enumerate(df["fj_particle"][i]): + print("%s\t" % run_number, ev_id, fj_particle.pt(), fj_particle.eta(), + fj_particle.phi(), sep='\t') + + #--------------------------------------------------------------- # Save the run_numbers and unique ev_ids as attributes for given io, level #--------------------------------------------------------------- @@ -432,7 +453,7 @@ def check_and_move_extra_runs(self, MPI): last_run_nums = [getattr(self, "df_fjparticles_p_MPI" + MPI)["run_number"][-1], getattr(self, "df_fjparticles_h_MPI" + MPI)["run_number"][-1], getattr(self, "df_fjparticles_ch_MPI" + MPI)["run_number"][-1]] - + if len(np.unique(last_run_nums)) != 1: last_run = min(last_run_nums) # assuming run_number is sorted in TTree @@ -470,14 +491,22 @@ def check_and_move_extra_evs(self, MPI): getattr(self, "df_fjparticles_h_MPI" + MPI)["ev_id"][-1], getattr(self, "df_fjparticles_ch_MPI" + MPI)["ev_id"][-1]] - if len(np.unique(last_ev_ids)) != 1: + last_ev = min(last_ev_ids) # assuming ev_id is sorted in TTree + # Unless there is only 1 ev, and only if it is the last event in the file, + # 
move last ev to storage to make sure we have all its tracks + if getattr(self, "df_fjparticles_p_MPI" + MPI)["ev_id"][0] == max(last_ev_ids): + # Look for a case where we have not exhausted the datafile + for level in ["p", "h"]: + if getattr(self, "tree_len_%s_MPI%s" % (level, MPI)) <= \ + getattr(self, "df_iter_%s_MPI%s" % (level, MPI)) * self.track_df_step_size: + last_ev -= 1 + break + else: + last_ev -= 1 + if self.debug_level > 1: + print("last_ev:", last_ev) - last_ev = min(last_ev_ids) # assuming ev_id is sorted in TTree - # Unless there is only 1 ev, move last ev to storage to make sure we have all its tracks - if getattr(self, "df_fjparticles_p_MPI" + MPI)["ev_id"][0] != last_ev: - last_ev -= 1 - if self.debug_level > 1: - print("last_ev:", last_ev) + if last_ev != min(last_ev_ids) or len(np.unique(last_ev_ids)) != 1: for i, level in zip([0, 1, 2], ["p", "h", "ch"]): if last_ev_ids[i] != last_ev: @@ -578,9 +607,9 @@ def pair_dictionary(self, MPI): df_fjparticles_h = getattr(self, "df_fjparticles_h_MPI" + MPI) df_fjparticles_ch = getattr(self, "df_fjparticles_ch_MPI" + MPI) - # Need to figure out which ev_id to save. - # For some reason, it can be the case that the ev_id does not exist - # at one level (e.g. parton), while it does at another (e.g. hadron). + # Need to figure out which ev_id to save. + # For some reason, it can be the case that the ev_id does not exist + # at one level (e.g. parton), while it does at another (e.g. hadron). # Deal with this by simply removing these bad run_numbers / ev_ids. 
# First, check if there are any bad run_numbers bad_index_p = None @@ -609,7 +638,7 @@ def pair_dictionary(self, MPI): for i in range(len(run_numbers_h))]) bad_index_ch = None - bad_runs_ch = np.setdiff1d(run_numbers_ch, run_numbers_p) + bad_runs_ch = bad_runs_p #np.setdiff1d(run_numbers_ch, run_numbers_p) if len(bad_runs_ch): print("bad_runs_ch:", bad_runs_ch) bad_index_ch = li_concat([ @@ -626,6 +655,7 @@ def pair_dictionary(self, MPI): run_index_h = 0 overall_index_ch = 0 run_index_ch = 0 + while run_index_p < len(run_numbers_p): ev_ids_p = unique_ev_ids_per_run_p[run_index_p][0] if run_numbers_p[run_index_p] in bad_runs_p: @@ -672,12 +702,11 @@ def pair_dictionary(self, MPI): overall_index_ch += len(ev_ids_ch) run_index_ch += 1 - ''' DEBUGGING CODE ONLY + # DEBUGGING CODE ONLY # Test random pairing rate by pairing incorrect events - bad_index_p[0] = bad_index_p[1] = True - bad_index_h[-1] = bad_index_h[0] = True - bad_index_ch[-2] = bad_index_ch[-1] = True - ''' + #bad_index_p[0] = bad_index_p[1] = True + #bad_index_h[-1] = bad_index_h[0] = True + #bad_index_ch[-2] = bad_index_ch[-1] = True [df_fjparticles_p["run_number"].pop(i) and df_fjparticles_p["ev_id"].pop(i) and \ df_fjparticles_p["fj_particle"].pop(i) for i in range(len(bad_index_p)-1, -1, -1) \ @@ -757,7 +786,7 @@ def analyze_events(self, MPI): for level in ['p', 'h', 'ch']: for fj_particles in df['fj_particles_%s' % level]: self.fill_track_histograms(fj_particles, level, MPI) - + self.event_number = 0 # match jets -- and fill histograms @@ -766,11 +795,11 @@ def analyze_events(self, MPI): self.analyze_event(fj_particles_p, fj_particles_h, fj_particles_ch, MPI) - if self.debug_level > 0: + if self.debug_level > 2: for attr in dir(self): obj = getattr(self, attr) print('size of {}: {}'.format(attr, sys.getsizeof(obj))) - + #--------------------------------------------------------------- # Fill track histograms. 
#--------------------------------------------------------------- @@ -784,7 +813,7 @@ def fill_track_histograms(self, fj_particles, level, MPI): for track in fj_particles: getattr(self, "hTrackEtaPhi_%s_MPI%s" % (level, MPI)).Fill(track.eta(), track.phi()) getattr(self, "hTrackPt_%s_MPI%s" % (level, MPI)).Fill(track.pt()) - + #--------------------------------------------------------------- # Analyze jets of a given event. # fj_particles is the list of fastjet pseudojets for a single fixed event. @@ -813,7 +842,7 @@ def analyze_event(self, fj_particles_p, fj_particles_h, fj_particles_ch, MPI): # Loop through jetR, and process event for each R for jetR in self.jetR_list: - + # Keep track of whether to fill R-independent histograms self.fill_R_indep_hists = (jetR == self.jetR_list[0]) @@ -861,14 +890,12 @@ def analyze_jets(self, jets_p_selected, jets_h_selected, jets_ch_selected, jetR, self.fill_level_before_matching(jet_h, jetR, 'h', MPI) for jet_ch in jets_ch_selected: self.fill_level_before_matching(jet_ch, jetR, 'ch', MPI) - + # Loop through jets and set jet matching candidates for each jet in user_info name_suffix = "MPI%s_R%s" % (MPI, str(jetR)) - for jet_p in jets_p_selected: - for jet_h in jets_h_selected: - self.set_matching_candidates(jet_p, 'p', jet_h, 'h', jetR, MPI) - for jet_h in jets_h_selected: + for jet_p in jets_p_selected: + self.set_matching_candidates(jet_p, 'p', jet_h, 'h', jetR, MPI) for jet_ch in jets_ch_selected: self.set_matching_candidates(jet_h, 'h', jet_ch, 'ch', jetR, MPI) @@ -889,7 +916,7 @@ def set_matching_candidates(self, jet1, level1, jet2, level2, jetR, MPI): deltaR = jet1.delta_R(jet2) getattr(self, "hDeltaR_All_%s_%s_MPI%s_R%s" % ( level1, level2, MPI, str(jetR))).Fill(jet1.pt(), deltaR) - + # Add a matching candidate to the list if it is within the geometrical cut if deltaR < self.jet_matching_distance * jetR: self.set_jet_info(jet1, level1, jet2, level2, deltaR) @@ -899,7 +926,7 @@ def set_matching_candidates(self, jet1, level1, 
jet2, level2, jetR, MPI): # Set 'jet_match' as a matching candidate in user_info of 'jet' #--------------------------------------------------------------- def set_jet_info(self, jet1, level1, jet2, level2, deltaR): - + # Get/create object to store list of matching candidates jet_user_info = None if jet1.has_user_info(): @@ -918,7 +945,7 @@ def set_jet_info(self, jet1, level1, jet2, level2, deltaR): if deltaR < jet_user_info.closest_jet_deltaR_ch: jet_user_info.closest_jet_ch = jet2 jet_user_info.closest_jet_deltaR_ch = deltaR - + jet1.set_python_info(jet_user_info) #--------------------------------------------------------------- @@ -934,9 +961,9 @@ def set_matches(self, jet_h, jetR, MPI): h_p.Fill('all', jet_h.pt(), 1) h_ch = getattr(self, hname_ch) h_ch.Fill('all', jet_h.pt(), 1) - + if jet_h.has_user_info(): - + jet_info_h = jet_h.python_info() if len(jet_info_h.matching_candidates_p) > 0: @@ -947,12 +974,12 @@ def set_matches(self, jet_h, jetR, MPI): # Match with parton-level jet if len(jet_info_h.matching_candidates_p) == 1: jet_p = jet_info_h.closest_jet_p - + # Check that the match is unique if jet_p.has_user_info(): jet_info_p = jet_p.python_info() if len(jet_info_p.matching_candidates_p) == 1: - + # Set accepted match jet_info_h.match_p = jet_p jet_h.set_python_info(jet_info_h) @@ -961,12 +988,12 @@ def set_matches(self, jet_h, jetR, MPI): # Match with charged-level jet if len(jet_info_h.matching_candidates_ch) == 1: jet_ch = jet_info_h.closest_jet_ch - + # Check that the match is unique if jet_ch.has_user_info(): jet_info_ch = jet_ch.python_info() if len(jet_info_ch.matching_candidates_ch) == 1: - + # Set accepted match jet_info_h.match_ch = jet_ch jet_h.set_python_info(jet_info_h) @@ -984,7 +1011,7 @@ def fill_level_before_matching(self, jet, jetR, level, MPI): getattr(self, 'hZ_%s' % label).Fill(jet_pt, z) self.fill_unmatched_jet_histograms(jet, jetR, level, MPI) - + #--------------------------------------------------------------- # This function is 
called once for each jet #--------------------------------------------------------------- @@ -1006,11 +1033,11 @@ def fill_unmatched_jet_histograms(self, jet, jetR, level, MPI): continue else: jet_groomed_lund = None - + # Call user function to fill histograms self.fill_observable_histograms(jet, jet_groomed_lund, jetR, level, MPI, obs_setting, grooming_setting, obs_label, jet.pt()) - + #--------------------------------------------------------------- # Loop through jets and call user function to fill matched # histos if both det and truth jets are unique match. @@ -1030,6 +1057,14 @@ def fill_jet_matches(self, jet_h, jetR, MPI): jet_pt_h_ungroomed = jet_h.pt() jet_pt_ch_ungroomed = jet_ch.pt() + # Check for weird events + if abs(jet_pt_p_ungroomed - jet_pt_h_ungroomed) > 100: + print("\nweird event!") + print("p jet momentum: (%s, %s, %s)" % (jet_p.px(), jet_p.py(), jet_p.pz()), "pT:", jet_p.pt()) + print("h jet momentum: (%s, %s, %s)" % (jet_h.px(), jet_h.py(), jet_h.pz()), "pT:", jet_h.pt()) + print("delta R: %s * jetR" % (jet_p.delta_R(jet_h) / jetR) ) + #exit() + JES = (jet_pt_h_ungroomed - jet_pt_p_ungroomed) / jet_pt_p_ungroomed getattr(self, 'hJES_p_h_%s' % suffix).Fill(jet_pt_p_ungroomed, JES) JES = (jet_pt_ch_ungroomed - jet_pt_h_ungroomed) / jet_pt_h_ungroomed @@ -1128,7 +1163,7 @@ def init_response(self, observable, obs_name, level_1, level_2, MPI, jetR, pt_bins, obs_bins, obs_label): # Another set of THn for full hadron folding - title = ["p_{T}^{%s jet}" % level_2, "p_{T}^{%s jet}" % level_1, + title = ["p_{T}^{%s jet}" % level_2, "p_{T}^{%s jet}" % level_1, "%s_{%s}^{%s}" % (obs_name, obs_label, level_2), "%s_{%s}^{%s}" % (obs_name, obs_label, level_1)] @@ -1168,7 +1203,7 @@ def fill_response(self, observable, level_1, level_2, MPI, jetR, jet_pt_level_1, x_array = array('d', [jet_pt_level_2, jet_pt_level_1, obs_level_2, obs_level_1]) getattr(self, "hResponse_JetPt_"+label).Fill(x_array) - + if self.make_th3s and obs_level_1 > 0: obs_resolution = 
(obs_level_2 - obs_level_1) / obs_level_1 getattr(self, "hResidual_JetPt_"+label).Fill(jet_pt_level_1, obs_level_1, obs_resolution) @@ -1178,7 +1213,7 @@ def fill_response(self, observable, level_1, level_2, MPI, jetR, jet_pt_level_1, # You must implement this #--------------------------------------------------------------- def initialize_user_output_objects_R(self, jetR, MPI): - + raise NotImplementedError('You must implement initialize_user_output_objects_R()!') #--------------------------------------------------------------- diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_LHC18qr.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_LHC18qr.sh new file mode 100755 index 000000000..d31f61dc6 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_LHC18qr.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f7-9) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_data_ang.py -c config/ang/PbPb/process_angularity_PbPb.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_LHC20g4_embedding.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_LHC20g4_embedding.sh new file mode 100755 index 000000000..691601718 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_LHC20g4_embedding.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/process_angularity_PbPb_embedding.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_slurm_LHC18qr.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_slurm_LHC18qr.sh new file mode 100755 index 000000000..d7ef6daf8 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_slurm_LHC18qr.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_data" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC18qr/570/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18qr.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_slurm_LHC20g4_embedding.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_slurm_LHC20g4_embedding.sh new file mode 100755 index 000000000..3450a4282 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/ang_slurm_LHC20g4_embedding.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_MC" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-1000 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 1000 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_embedding.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_herwig_ang.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_herwig_ang.sh new file mode 100755 index 000000000..298c4b7cc --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_herwig_ang.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/process_angularity_PbPb_fastsim.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_jewel_ang.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_jewel_ang.sh new file mode 100755 index 000000000..26e2fe9ed --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_jewel_ang.sh @@ -0,0 +1,46 @@ +#! 
/bin/bash + +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-9) +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/process_angularity_PbPb_fastsim.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/${OUTPUT_PREFIX}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_pythia_ang.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_pythia_ang.sh new file mode 100755 index 000000000..ad486f96d --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/process_fastsim_pythia_ang.sh @@ -0,0 +1,46 @@ +#! 
/bin/bash + +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-13) +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/process_angularity_PbPb_fastsim.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/${OUTPUT_PREFIX}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_herwig_ang.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_herwig_ang.sh new file mode 100755 index 000000000..987a4ee55 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_herwig_ang.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fs_herwig" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/herwig_alice/tree_fastsim/266374/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_herwig_ang.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_jewel_ang.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_jewel_ang.sh new file mode 100755 index 000000000..ba065fba2 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_jewel_ang.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fastsim" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-1000 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_fastsim/798301/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 1000 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_jewel_ang.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_jewel_recoils_on_ang.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_jewel_recoils_on_ang.sh new file mode 100755 index 000000000..1ed8eb0a4 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_jewel_recoils_on_ang.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fastsim" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_fastsim_recoils_on/996432/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_jewel_ang.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_pythia_ang.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_pythia_ang.sh new file mode 100755 index 000000000..be4af5e2d --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/fastsim/slurm_fastsim_pythia_ang.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fastsim" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-600 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/pythia_alice/tree_fastsim/258314/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 600 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_pythia_ang.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_LHC18qr_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_LHC18qr_pTcut.sh new file mode 100755 index 000000000..490ca5e82 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_LHC18qr_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f7-9) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_data_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_LHC20g4_embedding_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_LHC20g4_embedding_pTcut.sh new file mode 100755 index 000000000..6ea85335c --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_LHC20g4_embedding_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_slurm_LHC18qr_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_slurm_LHC18qr_pTcut.sh new file mode 100755 index 000000000..d39dffd4b --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_slurm_LHC18qr_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_data" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC18qr/570/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18qr_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_slurm_LHC20g4_embedding_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_slurm_LHC20g4_embedding_pTcut.sh new file mode 100755 index 000000000..5692b35e9 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/ang_slurm_LHC20g4_embedding_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_MC" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_embedding_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_herwig_ang_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_herwig_ang_pTcut.sh new file mode 100755 index 000000000..06652ab70 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_herwig_ang_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_fastsim_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_jewel_ang_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_jewel_ang_pTcut.sh new file mode 100755 index 000000000..b065c22c5 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_jewel_ang_pTcut.sh @@ -0,0 +1,46 @@ +#!
/bin/bash + +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-9) +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_fastsim_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/${OUTPUT_PREFIX}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_pythia_ang_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_pythia_ang_pTcut.sh new file mode 100755 index 000000000..25b602acc --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/process_fastsim_pythia_ang_pTcut.sh @@ -0,0 +1,46 @@ +#!
/bin/bash + +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-13) +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_fastsim_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/${OUTPUT_PREFIX}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_herwig_ang_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_herwig_ang_pTcut.sh new file mode 100755 index 000000000..de9495f8d --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_herwig_ang_pTcut.sh @@ -0,0 +1,33 @@ +#!
/bin/bash + +#SBATCH --job-name="ang_fs_herwig" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/herwig_alice/tree_fastsim/266374/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_herwig_ang_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_jewel_ang_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_jewel_ang_pTcut.sh new file mode 100755 index 000000000..43dbd8c44 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_jewel_ang_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fastsim" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_fastsim/798301/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_jewel_ang_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_pythia_ang_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_pythia_ang_pTcut.sh new file mode 100755 index 000000000..ef426f963 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/fastsim/slurm_fastsim_pythia_ang_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fastsim" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/pythia_alice/tree_fastsim/258314/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_pythia_ang_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_LHC18qr_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_LHC18qr_randmass_pTcut.sh new file mode 100755 index 000000000..c4d3acad6 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_LHC18qr_randmass_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f7-9) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_data_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_randmass_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_LHC20g4_embedding_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_LHC20g4_embedding_randmass_pTcut.sh new file mode 100755 index 000000000..a01d5bfd0 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_LHC20g4_embedding_randmass_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_embedding_randmass_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_slurm_LHC18qr_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_slurm_LHC18qr_randmass_pTcut.sh new file mode 100755 index 000000000..267749d3f --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_slurm_LHC18qr_randmass_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_data_randmass" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-1000 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC18qr/570/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 1000 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18qr_randmass_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_slurm_LHC20g4_embedding_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_slurm_LHC20g4_embedding_randmass_pTcut.sh new file mode 100755 index 000000000..d5bb1bd15 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/randmass/ang_slurm_LHC20g4_embedding_randmass_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_MC_randmass" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_embedding_randmass_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/thermal_closure/ang_LHC20g4_embedding_thermal_closure_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/thermal_closure/ang_LHC20g4_embedding_thermal_closure_pTcut.sh new file mode 100755 index 000000000..9bfe721d9 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/thermal_closure/ang_LHC20g4_embedding_thermal_closure_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_thermal_closure_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/thermal_closure/ang_slurm_LHC20g4_embedding_thermal_closure_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/thermal_closure/ang_slurm_LHC20g4_embedding_thermal_closure_pTcut.sh new file mode 100755 index 000000000..47bc6bb16 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/thermal_closure/ang_slurm_LHC20g4_embedding_thermal_closure_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_thermal_closure" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_embedding_thermal_closure_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/treff/ang_LHC20g4_treff_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/treff/ang_LHC20g4_treff_pTcut.sh new file mode 100755 index 000000000..3aff440f9 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/treff/ang_LHC20g4_treff_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/leading_track_pTcut/process_angularity_PbPb_treff_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/treff/ang_slurm_LHC20g4_treff_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/treff/ang_slurm_LHC20g4_treff_pTcut.sh new file mode 100755 index 000000000..fe30b1f90 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/pTcut/treff/ang_slurm_LHC20g4_treff_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_treff" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_treff_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_LHC18qr_randmass.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_LHC18qr_randmass.sh new file mode 100755 index 000000000..ca32e2714 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_LHC18qr_randmass.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f7-9) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_data_ang.py -c config/ang/PbPb/process_angularity_PbPb_randmass.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_LHC20g4_embedding_randmass.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_LHC20g4_embedding_randmass.sh new file mode 100755 index 000000000..1cd1311d1 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_LHC20g4_embedding_randmass.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/process_angularity_PbPb_embedding_randmass.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_slurm_LHC18qr_randmass.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_slurm_LHC18qr_randmass.sh new file mode 100755 index 000000000..cee5359fd --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_slurm_LHC18qr_randmass.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_data_randmass" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-1000 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC18qr/570/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 1000 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18qr_randmass.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_slurm_LHC20g4_embedding_randmass.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_slurm_LHC20g4_embedding_randmass.sh new file mode 100755 index 000000000..a65269817 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/randmass/ang_slurm_LHC20g4_embedding_randmass.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_MC_randmass" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-800 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 800 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_embedding_randmass.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_norecoils_nosubtraction.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_norecoils_nosubtraction.sh new file mode 100755 index 000000000..c19f648cb --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_norecoils_nosubtraction.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f6-8) +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_jewel_truth_level_ang.py -c config/ang/PbPb/theory/JEWEL_norecoil_nosubtraction.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_recoils_negative_recombiner.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_recoils_negative_recombiner.sh new file mode 100755 index 000000000..843d1653c --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_recoils_negative_recombiner.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. 
+# The main use is to give this script to a slurm script. + +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f6-8) +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_jewel_truth_level_ang.py -c config/ang/PbPb/theory/JEWEL_yesrecoil_yessubtraction_negative_recombiner.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_recoils_nosubtraction.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_recoils_nosubtraction.sh new file mode 100755 index 000000000..43930f7b8 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_JEWEL_gen_level_recoils_nosubtraction.sh @@ -0,0 +1,50 @@ +#! 
/bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. + +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f6-8) +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_jewel_truth_level_ang.py -c config/ang/PbPb/theory/JEWEL_yesrecoil_nosubtraction.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_analyze_JETSCAPE.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_analyze_JETSCAPE.sh new file mode 100755 index 000000000..c747f6796 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/process_analyze_JETSCAPE.sh @@ -0,0 +1,48 @@ +#! 
/bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. + +# Command line arguments +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +# Note: suffix depends on file structure of input file -- need to edit appropriately +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f7-8) +OUTPUT_DIR="/rstorage/jetscape/AnalysisResults/$JOB_ID/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /software/users/ezra/heppy/modules +module load heppy/1.0 +module use /software/users/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /software/users/james/jetscape-docker/JETSCAPE-analysis +python jetscape_analysis/analysis/analyze_events_TG3_ezra.py -c config/TG3_ezra.yaml -i $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/jetscape/AnalysisResults/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/jetscape/AnalysisResults/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_norecoils_nosubtraction.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_norecoils_nosubtraction.sh new file mode 100755 index 000000000..102402def --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_norecoils_nosubtraction.sh @@ -0,0 +1,34 @@ +#! 
/bin/bash + +#SBATCH --job-name=jwl_norec +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=2 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-319 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_gen/746611/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=25 +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_JEWEL_gen_level_norecoils_nosubtraction.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done + diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_recoils_negative_recombiner.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_recoils_negative_recombiner.sh new file mode 100755 index 000000000..0d9d2fcf9 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_recoils_negative_recombiner.sh @@ -0,0 +1,34 @@ +#! 
/bin/bash + +#SBATCH --job-name=jwl_rec_nosub +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=2 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-400 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_gen/851894/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=20 +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_JEWEL_gen_level_recoils_negative_recombiner.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done + diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_recoils_nosubtraction.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_recoils_nosubtraction.sh new file mode 100755 index 000000000..944ddc57d --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_JEWEL_gen_level_recoils_nosubtraction.sh @@ -0,0 +1,34 @@ +#! 
/bin/bash + +#SBATCH --job-name=jwl_rec_nosub +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=2 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-400 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_gen/851894/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=20 +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_JEWEL_gen_level_recoils_nosubtraction.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done + diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_analyze_JETSCAPE.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_analyze_JETSCAPE.sh new file mode 100755 index 000000000..eb339aac8 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_analyze_JETSCAPE.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name=jetscape +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-290 +#SBATCH --output=/rstorage/jetscape/AnalysisResults/slurm-%A_%a.out + +FILE_PATHS='/rstorage/jetscape/JETSCAPE-AA-events/skim/497764/v3/files_TG3_5020_0_10.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 6 nodes * 20 cores active +FILES_PER_JOB=10 +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_analyze_JETSCAPE.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_pp_JEWEL_gen_level_norecoils_nosubtraction.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_pp_JEWEL_gen_level_norecoils_nosubtraction.sh new file mode 100755 index 000000000..9c5768db8 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/theory/slurm_pp_JEWEL_gen_level_norecoils_nosubtraction.sh @@ -0,0 +1,34 @@ +#! 
/bin/bash + +#SBATCH --job-name=jwl_pp +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=2 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-319 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_gen/823890/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=25 +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_JEWEL_gen_level_norecoils_nosubtraction.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done + diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/thermal_closure/ang_LHC20g4_embedding_thermal_closure.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/thermal_closure/ang_LHC20g4_embedding_thermal_closure.sh new file mode 100755 index 000000000..06ec65ff0 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/thermal_closure/ang_LHC20g4_embedding_thermal_closure.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/process_angularity_PbPb_thermal_closure.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/thermal_closure/ang_slurm_LHC20g4_embedding_thermal_closure.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/thermal_closure/ang_slurm_LHC20g4_embedding_thermal_closure.sh new file mode 100755 index 000000000..4203cbf4d --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/thermal_closure/ang_slurm_LHC20g4_embedding_thermal_closure.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_thermal_closure" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-1000 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 1000 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_embedding_thermal_closure.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/treff/ang_LHC20g4_treff.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/treff/ang_LHC20g4_treff.sh new file mode 100755 index 000000000..7883b6ab6 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/treff/ang_LHC20g4_treff.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_mc_ang.py -c config/ang/PbPb/process_angularity_PbPb_treff.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/treff/ang_slurm_LHC20g4_treff.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/treff/ang_slurm_LHC20g4_treff.sh new file mode 100755 index 000000000..43279be0c --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/PbPb/treff/ang_slurm_LHC20g4_treff.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_treff" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=3 +#SBATCH --partition=long +#SBATCH --time=72:00:00 +#SBATCH --array=1-1000 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC20g4/568/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 1000 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC20g4_treff.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC17pq.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC17pq.sh index af7340511..121cdd3c5 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC17pq.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC17pq.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd /home/ezra/pyjetty/pyjetty/alice_analysis/ -python process/user/ang_pp/ang_data.py -c config/ang/process_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_data_ang.py -c config/ang/process_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC18b8.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC18b8.sh index b35d7da5c..123919ac5 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC18b8.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/ang_LHC18b8.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd 
/home/ezra/pyjetty/pyjetty/alice_analysis -python process/user/ang_pp/ang_mc.py -c config/ang/process_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_mc_ang.py -c config/ang/process_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen0.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen0.sh index 89629bd31..491c18f2c 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen0.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen0.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd /home/ezra/pyjetty/pyjetty/alice_analysis -python process/user/ang_pp/ang_fs.py -c config/ang/process_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_mc_ang.py -c config/ang/process_angularity_fastsim.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen1.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen1.sh index b1f588ca4..ff68c054d 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen1.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/fastsim/ang_LHC18b8_fastsim_gen1.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd /home/ezra/pyjetty/pyjetty/alice_analysis -python process/user/ang_pp/ang_fs.py -c config/ang/process_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_mc_ang.py -c config/ang/process_angularity_fastsim.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv 
/rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/gen/ang_gen.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/ang_gen.sh index 5aae7608d..3b06b4a2b 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/gen/ang_gen.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/ang_gen.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd /home/ezra/pyjetty/pyjetty/alice_analysis -python process/user/ang_pp/process_parton_hadron_ang.py -c config/ang/gen_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_parton_hadron_ang.py -c config/ang/gen_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/herwig_ang_slurm_old.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/herwig_ang_slurm_old.sh index d8dc4fe48..286f7974e 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/herwig_ang_slurm_old.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/herwig_ang_slurm_old.sh @@ -33,9 +33,9 @@ fi SEED=$(( ($CORE_IN_BIN - 1) * NEV_PER_JOB + 1111 )) -HERWIG_SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang_pp/herwig_infiles/$BIN/LHC_5020.run" -HERWIG_SCRIPT_MPI="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang_pp/herwig_infiles/$BIN/LHC_5020_MPI.run" -PYTHON_SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang_pp/herwig_parton_hadron.py" +HERWIG_SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang/herwig_infiles/$BIN/LHC_5020.run" +HERWIG_SCRIPT_MPI="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang/herwig_infiles/$BIN/LHC_5020_MPI.run" +PYTHON_SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang/herwig_parton_hadron.py" 
CONFIG="/home/ezra/pyjetty/pyjetty/alice_analysis/config/ang/gen_angularity.yaml" TEMP_OUTDIR="/storage/u/alice/AnalysisResults/ang/$SLURM_ARRAY_JOB_ID/$BIN/$CORE_IN_BIN" OUTDIR="/rstorage/alice/AnalysisResults/ang/$SLURM_ARRAY_JOB_ID/$BIN/$CORE_IN_BIN" diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/pythia_ang_slurm_old.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/pythia_ang_slurm_old.sh index e4fa4b677..1768f0056 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/pythia_ang_slurm_old.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/pythia_ang_slurm_old.sh @@ -4,21 +4,22 @@ #SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 #SBATCH --partition=std #SBATCH --time=24:00:00 -#SBATCH --array=1-1120 +#SBATCH --array=1-200 #SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out # Center of mass energy in GeV ECM=5020 # Number of events per pT-hat bin (for statistics) -NEV_DESIRED=21000000 +#NEV_DESIRED=21000000 +NEV_DESIRED=1000000 # Lower edges of the pT-hat bins PTHAT_BINS=(5 7 9 12 16 21 28 36 45 57 70 85 99 115 132 150 169 190 212 235) echo "Number of pT-hat bins: ${#PTHAT_BINS[@]}" # Currently we have 8 nodes * 20 cores active -NCORES=1120 +NCORES=200 NEV_PER_JOB=$(( $NEV_DESIRED * ${#PTHAT_BINS[@]} / $NCORES )) echo "Number of events per job: $NEV_PER_JOB" NCORES_PER_BIN=$(( $NCORES / ${#PTHAT_BINS[@]} )) @@ -47,16 +48,24 @@ module use ~/pyjetty/modules module load pyjetty/1.0 echo "python is" $(which python) cd /home/ezra/analysis_env/ -SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang_pp/pythia_parton_hadron.py" -CONFIG="/home/ezra/pyjetty/pyjetty/alice_analysis/config/ang/gen_angularity.yaml" +SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang/pythia_parton_hadron.py" +CONFIG="/home/ezra/pyjetty/pyjetty/alice_analysis/config/ang/process_angularity.yaml" if $USE_PTHAT_MAX; then - echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin 
$PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX " + #echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --no-tree --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX " + #pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED \ + # --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --no-tree \ + # --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX + echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --no-tree --no-match-level ch --pythiaopts HardQCD:all=on,PhaseSpace:pTHatMax=$PTHAT_MAX,ParticleDecays:limitTau0=on " pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED \ --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --no-tree \ - --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX + --no-match-level ch --pythiaopts HardQCD:all=on,PhaseSpace:pTHatMax=$PTHAT_MAX,111:mayDecay=on,310:mayDecay=off,3122:mayDecay=off,3112:mayDecay=off,3222:mayDecay=off,3312:mayDecay=off,3322:mayDecay=off,3334:mayDecay=off else + #pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR \ + # --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB \ + # --no-tree --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2 + echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --no-tree --no-match-level ch --pythiaopts HardQCD:all=on,ParticleDecays:limitTau0=on " pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR \ --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB \ - --no-tree --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2 + --no-tree --no-match-level ch --pythiaopts 
HardQCD:all=on,111:mayDecay=on,310:mayDecay=off,3122:mayDecay=off,3112:mayDecay=off,3222:mayDecay=off,3312:mayDecay=off,3322:mayDecay=off,3334:mayDecay=off fi diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/pythia_quark_gluon_ang_slurm.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/pythia_quark_gluon_ang_slurm.sh new file mode 100755 index 000000000..2686d8bb9 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/gen/old/pythia_quark_gluon_ang_slurm.sh @@ -0,0 +1,64 @@ +#! /bin/bash + +#SBATCH --job-name="pythiagen" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-280 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +# Center of mass energy in GeV +ECM=5020 + +# Number of events per pT-hat bin (for statistics) +NEV_DESIRED=10500000 + +# Lower edges of the pT-hat bins +#PTHAT_BINS=(5 7 9 12 16 21 28 36 45 57 70 85 99 115 132 150 169 190 212 235) +PTHAT_BINS=(5 7 9 12 16 21 28 36 45 57) +echo "Number of pT-hat bins: ${#PTHAT_BINS[@]}" + +# Currently we have 8 nodes * 20 cores active +NCORES=280 +NEV_PER_JOB=$(( $NEV_DESIRED * ${#PTHAT_BINS[@]} / $NCORES )) +echo "Number of events per job: $NEV_PER_JOB" +NCORES_PER_BIN=$(( $NCORES / ${#PTHAT_BINS[@]} )) +echo "Number of cores per pT-hat bin: $NCORES_PER_BIN" + +BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) / $NCORES_PER_BIN + 1)) +CORE_IN_BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) % $NCORES_PER_BIN + 1)) +PTHAT_MIN=${PTHAT_BINS[$(( $BIN - 1 ))]} +if [ $BIN -lt ${#PTHAT_BINS[@]} ]; then + USE_PTHAT_MAX=true + PTHAT_MAX=${PTHAT_BINS[$BIN]} + echo "Calculating bin $BIN (pThat=[$PTHAT_MIN,$PTHAT_MAX]) with core number $CORE_IN_BIN" +else + USE_PTHAT_MAX=false + echo "Calculating bin $BIN (pThat_min=$PTHAT_MIN) with core number $CORE_IN_BIN" +fi + +SEED=$(( ($CORE_IN_BIN - 1) * NEV_PER_JOB + 1111 )) + +# Do the PYTHIA simulation & matching +OUTDIR="/rstorage/alice/AnalysisResults/ang/$SLURM_ARRAY_JOB_ID/$BIN/$CORE_IN_BIN" 
+mkdir -p $OUTDIR +module use ~/heppy/modules +module load heppy/1.0 +module use ~/pyjetty/modules +module load pyjetty/1.0 +echo "python is" $(which python) +cd /home/ezra/analysis_env/ +SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/ang/pythia_quark_gluon_ang.py" +CONFIG="/home/ezra/pyjetty/pyjetty/alice_analysis/config/ang/leading_track_pTcut/process_angularity_pp_gen_pTcut.yaml" + +if $USE_PTHAT_MAX; then + echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX" + pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED \ + --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB \ + --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX +else + echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2" + pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR \ + --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB \ + --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2 +fi diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_LHC17pq_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_LHC17pq_pTcut.sh new file mode 100755 index 000000000..c17c30a29 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_LHC17pq_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f9-11) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_data_ang.py -c config/ang/leading_track_pTcut/process_angularity_pp_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_LHC18b8_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_LHC18b8_pTcut.sh new file mode 100755 index 000000000..2a75f3577 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_LHC18b8_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f6-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/leading_track_pTcut/process_angularity_pp_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_slurm_LHC17pq_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_slurm_LHC17pq_pTcut.sh new file mode 100755 index 000000000..0d9d2156d --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_slurm_LHC17pq_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_data" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC17pq/448/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC17pq_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_slurm_LHC18b8_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_slurm_LHC18b8_pTcut.sh new file mode 100755 index 000000000..fa27c5265 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/ang_slurm_LHC18b8_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_mc" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC18b8/569/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18b8_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_LHC18b8_fastsim_gen0_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_LHC18b8_fastsim_gen0_pTcut.sh new file mode 100755 index 000000000..d5dc38a95 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_LHC18b8_fastsim_gen0_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-13) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/leading_track_pTcut/process_angularity_pp_fastsim_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_LHC18b8_fastsim_gen1_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_LHC18b8_fastsim_gen1_pTcut.sh new file mode 100755 index 000000000..6cf25b885 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_LHC18b8_fastsim_gen1_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f8-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/leading_track_pTcut/process_angularity_pp_fastsim_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_slurm_LHC18b8_fastsim_gen0_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_slurm_LHC18b8_fastsim_gen0_pTcut.sh new file mode 100755 index 000000000..58cda6e04 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_slurm_LHC18b8_fastsim_gen0_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fsg0" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/pythia_alice/tree_fastsim/258314/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18b8_fastsim_gen0_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_slurm_LHC18b8_fastsim_gen1_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_slurm_LHC18b8_fastsim_gen1_pTcut.sh new file mode 100755 index 000000000..026a2cabe --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/fastsim/ang_slurm_LHC18b8_fastsim_gen1_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_fsg1" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/herwig_alice/tree_fastsim/266374/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18b8_fastsim_gen1_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_LHC17pq_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_LHC17pq_randmass_pTcut.sh new file mode 100755 index 000000000..24497460d --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_LHC17pq_randmass_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f9-11) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/ang/process_data_ang.py -c config/ang/leading_track_pTcut/process_angularity_pp_randmass_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_LHC18b8_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_LHC18b8_randmass_pTcut.sh new file mode 100755 index 000000000..a31dcb700 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_LHC18b8_randmass_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f6-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/leading_track_pTcut/process_angularity_pp_randmass_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_slurm_LHC17pq_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_slurm_LHC17pq_randmass_pTcut.sh new file mode 100755 index 000000000..750225cb4 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_slurm_LHC17pq_randmass_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_rndm" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC17pq/448/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC17pq_randmass_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_slurm_LHC18b8_randmass_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_slurm_LHC18b8_randmass_pTcut.sh new file mode 100755 index 000000000..dc7b3ded3 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/randmass/ang_slurm_LHC18b8_randmass_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_rndm" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC18b8/569/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18b8_randmass_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/treff/ang_LHC18b8_treff_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/treff/ang_LHC18b8_treff_pTcut.sh new file mode 100755 index 000000000..62eeeea2b --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/treff/ang_LHC18b8_treff_pTcut.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take two command line arguments -- (1) input file path, (2) output dir prefix +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/ang/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f6-10) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis +python process/user/ang/process_mc_ang.py -c config/ang/leading_track_pTcut/process_angularity_pp_treff_pTcut.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/treff/ang_slurm_LHC18b8_treff_pTcut.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/treff/ang_slurm_LHC18b8_treff_pTcut.sh new file mode 100755 index 000000000..637f0f7a4 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/pTcut/treff/ang_slurm_LHC18b8_treff_pTcut.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="ang_treff" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC18b8/569/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 7 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun ang_LHC18b8_treff_pTcut.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC17pq_randmass.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC17pq_randmass.sh index f5c0def84..e005c6dbc 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC17pq_randmass.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC17pq_randmass.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd /home/ezra/pyjetty/pyjetty/alice_analysis/ -python process/user/ang_pp/ang_data.py -c config/ang/randmass_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_data_ang.py -c config/ang/process_angularity_randmass.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC18b8_randmass.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC18b8_randmass.sh index 6d42e2f27..f184bbd1b 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC18b8_randmass.sh +++ 
b/pyjetty/alice_analysis/slurm/sbatch/ang/randmass/ang_LHC18b8_randmass.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd /home/ezra/pyjetty/pyjetty/alice_analysis -python process/user/ang_pp/ang_mc.py -c config/ang/randmass_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_mc_ang.py -c config/ang/process_angularity_randmass.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/ang/treff/ang_LHC18b8_treff.sh b/pyjetty/alice_analysis/slurm/sbatch/ang/treff/ang_LHC18b8_treff.sh index 5d9135ffe..4cca55646 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/ang/treff/ang_LHC18b8_treff.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/ang/treff/ang_LHC18b8_treff.sh @@ -44,7 +44,7 @@ module list # Run python script via pipenv cd /home/ezra/pyjetty/pyjetty/alice_analysis -python process/user/ang_pp/ang_mc.py -c config/ang/treff_angularity.yaml -f $INPUT_FILE -o $OUTPUT_DIR +python process/user/ang/process_mc_ang.py -c config/ang/process_angularity_treff.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder mv /rstorage/alice/AnalysisResults/ang/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/fastsim/process_fastsim_jewel_recoils_on.sh b/pyjetty/alice_analysis/slurm/sbatch/fastsim/process_fastsim_jewel_recoils_on.sh new file mode 100755 index 000000000..87905cdd6 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/fastsim/process_fastsim_jewel_recoils_on.sh @@ -0,0 +1,45 @@ +#! 
/bin/bash + +if [ "$1" != "" ]; then + INPUT_FILE=$1 + #echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +# Note: suffix depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f6-8) +echo $INPUT_FILE +echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/generators/jewel_alice/tree_fastsim_recoils_on/$JOB_ID/$OUTPUT_SUFFIX" +echo "Output dir: $OUTPUT_DIR" +mkdir -p $OUTPUT_DIR + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run main script +cd /home/ezra/pyjetty/pyjetty/alice_analysis/process/user/fastsim +python eff_smear.py -i $INPUT_FILE -o $OUTPUT_DIR --jewel + +# Move stdout to appropriate folder +mv /rstorage/generators/jewel_alice/tree_fastsim_recoils_on/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/generators/jewel_alice/tree_fastsim_recoils_on/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/sbatch/fastsim/slurm_fastsim_jewel_recoils_on.sh b/pyjetty/alice_analysis/slurm/sbatch/fastsim/slurm_fastsim_jewel_recoils_on.sh new file mode 100755 index 000000000..17e95bf63 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/fastsim/slurm_fastsim_jewel_recoils_on.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name=fastsim-jewel +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-1000 +#SBATCH --output=/rstorage/generators/jewel_alice/tree_fastsim_recoils_on/slurm-%A_%a.out + +FILE_PATHS='/rstorage/generators/jewel_alice/tree_gen/851894/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=8 +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID*FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun process_fastsim_jewel_recoils_on.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/gen/pythia_hf_gen_slurm.sh b/pyjetty/alice_analysis/slurm/sbatch/gen/pythia_hf_gen_slurm.sh new file mode 100755 index 000000000..168251e6c --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/gen/pythia_hf_gen_slurm.sh @@ -0,0 +1,63 @@ +#! 
/bin/bash + +#SBATCH --job-name="PythiaGen" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-110 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +# Center of mass energy in GeV +ECM=5020 + +# Number of events per pT-hat bin (for statistics) +NEV_DESIRED=1000000 + +# Lower edges of the pT-hat bins +PTHAT_BINS=(9 12 16 21 28 36 45 57 70 85 100) +echo "Number of pT-hat bins: ${#PTHAT_BINS[@]}" + +# Currently we have 8 nodes * 20 cores active +NCORES=110 +NEV_PER_JOB=$(( $NEV_DESIRED * ${#PTHAT_BINS[@]} / $NCORES )) +echo "Number of events per job: $NEV_PER_JOB" +NCORES_PER_BIN=$(( $NCORES / ${#PTHAT_BINS[@]} )) +echo "Number of cores per pT-hat bin: $NCORES_PER_BIN" + +BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) / $NCORES_PER_BIN + 1)) +CORE_IN_BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) % $NCORES_PER_BIN + 1)) +PTHAT_MIN=${PTHAT_BINS[$(( $BIN - 1 ))]} +if [ $BIN -lt ${#PTHAT_BINS[@]} ]; then + USE_PTHAT_MAX=true + PTHAT_MAX=${PTHAT_BINS[$BIN]} + echo "Calculating bin $BIN (pThat=[$PTHAT_MIN,$PTHAT_MAX]) with core number $CORE_IN_BIN" +else + USE_PTHAT_MAX=false + echo "Calculating bin $BIN (pThat_min=$PTHAT_MIN) with core number $CORE_IN_BIN" +fi + +SEED=$(( ($CORE_IN_BIN - 1) * NEV_PER_JOB + 1111 )) + +# Do the PYTHIA simulation & matching +OUTDIR="/rstorage/alice/AnalysisResults/ang/$SLURM_ARRAY_JOB_ID/$BIN/$CORE_IN_BIN" +mkdir -p $OUTDIR +module use /software/users/ezra/heppy/modules +module load heppy/1.0 +module use /software/users/ezra/pyjetty/modules +module load pyjetty/1.0 +echo "python is" $(which python) +cd /software/users/ezra/analysis_env/ +SCRIPT="/software/users/ezra/pyjetty/pyjetty/alihfjets/dev/hfjet/process/user/hf_EEC/pythia_quark_gluon_ezra.py" +CONFIG="/software/users/ezra/pyjetty/pyjetty/alice_analysis/config/ang/mass_R0.2_ptbin2.yaml" + +if $USE_PTHAT_MAX; then + echo "python $SCRIPT -o $OUTDIR -c $CONFIG --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev 
$NEV_PER_JOB --replaceKP 1 --chinitscat 3 --pythiaopts HardQCD:all=on,PhaseSpace:pTHatMax=$PTHAT_MAX" + python $SCRIPT -o $OUTDIR -c $CONFIG --user-seed $SEED \ + --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB \ + --replaceKP 1 --chinitscat 3 --pythiaopts HardQCD:all=on,PhaseSpace:pTHatMax=$PTHAT_MAX +else + echo "python $SCRIPT -o $OUTDIR -c $CONFIG --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --replaceKP 1 --chinitscat 3 --pythiaopts HardQCD:all=on" + python $SCRIPT -o $OUTDIR -c $CONFIG --user-seed $SEED \ + --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB \ + --replaceKP 1 --chinitscat 3 --pythiaopts HardQCD:all=on +fi diff --git a/pyjetty/alice_analysis/slurm/sbatch/lund/lund_LHC17pq.sh b/pyjetty/alice_analysis/slurm/sbatch/lund/lund_LHC17pq.sh new file mode 100755 index 000000000..ad0071d31 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/lund/lund_LHC17pq.sh @@ -0,0 +1,50 @@ +#! /bin/bash + +# This script takes an input file path as an argument, and runs a python script to +# process the input file and write an output ROOT file. +# The main use is to give this script to a slurm script. 
+ +# Take three command line arguments -- (1) input file path, (2) job ID, (3) task ID +if [ "$1" != "" ]; then + INPUT_FILE=$1 + echo "Input file: $INPUT_FILE" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + JOB_ID=$2 + echo "Job ID: $JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$3" != "" ]; then + TASK_ID=$3 + echo "Task ID: $TASK_ID" +else + echo "Wrong command line arguments" +fi + +# Define output path from relevant sub-path of input file +OUTPUT_PREFIX="AnalysisResults/lund/$JOB_ID" +# Note: depends on file structure of input file -- need to edit appropriately for each dataset +OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f9-11) +#echo $OUTPUT_SUFFIX +OUTPUT_DIR="/rstorage/alice/$OUTPUT_PREFIX/$OUTPUT_SUFFIX/" +mkdir -p $OUTPUT_DIR +echo "Output dir: $OUTPUT_DIR" + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Run python script via pipenv +cd /home/ezra/pyjetty/pyjetty/alice_analysis/ +python process/user/lund/process_data_lund.py -c config/lund/process_lund.yaml -f $INPUT_FILE -o $OUTPUT_DIR + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/lund/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/lund/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/sbatch/lund/lund_slurm_LHC17pq.sh b/pyjetty/alice_analysis/slurm/sbatch/lund/lund_slurm_LHC17pq.sh new file mode 100755 index 000000000..96c464101 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/lund/lund_slurm_LHC17pq.sh @@ -0,0 +1,33 @@ +#! 
/bin/bash + +#SBATCH --job-name="lunddata" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-640 +#SBATCH --output=/rstorage/alice/AnalysisResults/lund/slurm-%A_%a.out + +FILE_PATHS='/rstorage/alice/data/LHC17pq/448/files.txt' +NFILES=$(wc -l < $FILE_PATHS) +echo "N files to process: ${NFILES}" + +# Currently we have 8 nodes * 20 cores active +FILES_PER_JOB=$(( $NFILES / 640 + 1 )) +echo "Files per job: $FILES_PER_JOB" + +STOP=$(( SLURM_ARRAY_TASK_ID * FILES_PER_JOB )) +START=$(( $STOP - $(( $FILES_PER_JOB - 1 )) )) + +if (( $STOP > $NFILES )) +then + STOP=$NFILES +fi + +echo "START=$START" +echo "STOP=$STOP" + +for (( JOB_N = $START; JOB_N <= $STOP; JOB_N++ )) +do + FILE=$(sed -n "$JOB_N"p $FILE_PATHS) + srun lund_LHC17pq.sh $FILE $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID +done diff --git a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/herwig_theta_g_slurm.sh b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/herwig_theta_g_slurm.sh index e17adf7b9..a19c3a06d 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/herwig_theta_g_slurm.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/herwig_theta_g_slurm.sh @@ -5,7 +5,7 @@ #SBATCH --partition=long #SBATCH --time=48:00:00 #SBATCH --array=1-2560 -#SBATCH --output=/rstorage/alice/AnalysisResults/james/slurm-%A_%a.out +#SBATCH --output=/rstorage/alice/AnalysisResults/theta_g/slurm-%A_%a.out FILE_PATHS='/rstorage/alice/sim/herwig_gen/files.txt' NFILES=$(wc -l < $FILE_PATHS) diff --git a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/old/herwig_theta_g_slurm_old.sh b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/old/herwig_theta_g_slurm_old.sh new file mode 100755 index 000000000..b76a62a31 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/old/herwig_theta_g_slurm_old.sh @@ -0,0 +1,65 @@ +#! 
/bin/bash + +#SBATCH --job-name="HerwigGen" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-3000 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +# Number of events per pT-hat bin (for statistics) +NEV_DESIRED=15000000 + +# Lower edges of the pT-hat bins +PTHAT_BINS=(7 9 12 16 21 28 36 45 57 70 85 99 115 132 150 169 190 212 235 260) +echo "Number of pT-hat bins: ${#PTHAT_BINS[@]}" + +# Currently we have 8 nodes * 20 cores active +NCORES=3000 +NEV_PER_JOB=$(( $NEV_DESIRED * ${#PTHAT_BINS[@]} / $NCORES )) +echo "Number of events per job: $NEV_PER_JOB" +NCORES_PER_BIN=$(( $NCORES / ${#PTHAT_BINS[@]} )) +echo "Number of cores per pT-hat bin: $NCORES_PER_BIN" + +BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) / $NCORES_PER_BIN + 1)) +CORE_IN_BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) % $NCORES_PER_BIN + 1)) +PTHAT_MIN=${PTHAT_BINS[$(( $BIN - 1 ))]} +if [ $BIN -lt ${#PTHAT_BINS[@]} ]; then + PTHAT_MAX=${PTHAT_BINS[$BIN]} + echo "Calculating bin $BIN (pThat=[$PTHAT_MIN,$PTHAT_MAX]) with core number $CORE_IN_BIN" +else + echo "Calculating bin $BIN (pThat_min=$PTHAT_MIN) with core number $CORE_IN_BIN" +fi + +SEED=$(( ($CORE_IN_BIN - 1) * NEV_PER_JOB + 1111 )) + +HERWIG_SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/gen/herwig_infiles/$BIN/LHC_5020.run" +HERWIG_SCRIPT_MPI="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/gen/herwig_infiles/$BIN/LHC_5020_MPI.run" +PYTHON_SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/james/herwig_parton_hadron.py" +CONFIG="/home/ezra/pyjetty/pyjetty/alice_analysis/config/theta_g/pp/james_pp.yaml" +TEMP_OUTDIR="/storage/u/alice/AnalysisResults/theta_g/$SLURM_ARRAY_JOB_ID/$BIN/$CORE_IN_BIN" +OUTDIR="/rstorage/alice/AnalysisResults/theta_g/$SLURM_ARRAY_JOB_ID/$BIN/$CORE_IN_BIN" +mkdir -p $TEMP_OUTDIR +mkdir -p $OUTDIR + +# Load Herwig environment and generate events +source /software/users/james/herwig/bin/activate +cd $TEMP_OUTDIR +echo 
$PWD +echo "Running Herwig7 with MPI switched off..." +Herwig run $HERWIG_SCRIPT -d2 -N $NEV_PER_JOB -s $SEED +echo "Running Herwig7 with MPI switched on..." +Herwig run $HERWIG_SCRIPT_MPI -d2 -N $NEV_PER_JOB -s $SEED + +# Set modules and load Herwig environment +module use ~/heppy/modules +module load heppy/1.0 +module use ~/pyjetty/modules +module load pyjetty/1.0 +echo "python is" $(which python) +cd /home/ezra/analysis_env/ +pipenv run python $PYTHON_SCRIPT -c $CONFIG --input-file $TEMP_OUTDIR/LHC_5020-S$SEED.log --input-file-mpi $TEMP_OUTDIR/LHC_5020_MPI-S$SEED.log --output-dir $OUTDIR --no-tree + +# Clean up Herwig7 files to save space +rm $TEMP_OUTDIR/LHC_5020-S$SEED.* +rm $TEMP_OUTDIR/LHC_5020_MPI-S$SEED.* diff --git a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/old/pythia_theta_g_slurm_old.sh b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/old/pythia_theta_g_slurm_old.sh new file mode 100755 index 000000000..dc629e251 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/old/pythia_theta_g_slurm_old.sh @@ -0,0 +1,63 @@ +#! 
/bin/bash + +#SBATCH --job-name="pythiagen" +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-1120 +#SBATCH --output=/rstorage/alice/AnalysisResults/theta_g/slurm-%A_%a.out + +# Center of mass energy in GeV +ECM=5020 + +# Number of events per pT-hat bin (for statistics) +NEV_DESIRED=21000000 + +# Lower edges of the pT-hat bins +PTHAT_BINS=(5 7 9 12 16 21 28 36 45 57 70 85 99 115 132 150 169 190 212 235) +echo "Number of pT-hat bins: ${#PTHAT_BINS[@]}" + +# Currently we have 8 nodes * 20 cores active +NCORES=1120 +NEV_PER_JOB=$(( $NEV_DESIRED * ${#PTHAT_BINS[@]} / $NCORES )) +echo "Number of events per job: $NEV_PER_JOB" +NCORES_PER_BIN=$(( $NCORES / ${#PTHAT_BINS[@]} )) +echo "Number of cores per pT-hat bin: $NCORES_PER_BIN" + +BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) / $NCORES_PER_BIN + 1)) +CORE_IN_BIN=$(( ($SLURM_ARRAY_TASK_ID - 1) % $NCORES_PER_BIN + 1)) +PTHAT_MIN=${PTHAT_BINS[$(( $BIN - 1 ))]} +if [ $BIN -lt ${#PTHAT_BINS[@]} ]; then + USE_PTHAT_MAX=true + PTHAT_MAX=${PTHAT_BINS[$BIN]} + echo "Calculating bin $BIN (pThat=[$PTHAT_MIN,$PTHAT_MAX]) with core number $CORE_IN_BIN" +else + USE_PTHAT_MAX=false + echo "Calculating bin $BIN (pThat_min=$PTHAT_MIN) with core number $CORE_IN_BIN" +fi + +SEED=$(( ($CORE_IN_BIN - 1) * NEV_PER_JOB + 1111 )) + +# Do the PYTHIA simulation & matching +OUTDIR="/rstorage/alice/AnalysisResults/theta_g/$SLURM_ARRAY_JOB_ID/$BIN/$CORE_IN_BIN" +mkdir -p $OUTDIR +module use ~/heppy/modules +module load heppy/1.0 +module use ~/pyjetty/modules +module load pyjetty/1.0 +echo "python is" $(which python) +cd /home/ezra/analysis_env/ +SCRIPT="/home/ezra/pyjetty/pyjetty/alice_analysis/process/user/james/pythia_parton_hadron.py" +CONFIG="/home/ezra/pyjetty/pyjetty/alice_analysis/config/theta_g/pp/james_pp_gen.yaml" + +if $USE_PTHAT_MAX; then + echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB 
--no-tree --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX" + pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED \ + --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --no-tree \ + --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2,PhaseSpace:pTHatMax=$PTHAT_MAX +else + echo "pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB --no-tree --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2" + pipenv run python $SCRIPT -c $CONFIG --output-dir $OUTDIR \ + --user-seed $SEED --py-pthatmin $PTHAT_MIN --py-ecm $ECM --nev $NEV_PER_JOB \ + --no-tree --pythiaopts HardQCD:all=on,TimeShower:pTmin=0.2 +fi diff --git a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/pythia_theta_g_slurm.sh b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/pythia_theta_g_slurm.sh index cc0b5992c..cec0be929 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/pythia_theta_g_slurm.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/pythia_theta_g_slurm.sh @@ -5,7 +5,7 @@ #SBATCH --partition=long #SBATCH --time=48:00:00 #SBATCH --array=1-2560 -#SBATCH --output=/rstorage/alice/AnalysisResults/james/slurm-%A_%a.out +#SBATCH --output=/rstorage/alice/AnalysisResults/theta_g/slurm-%A_%a.out FILE_PATHS='/rstorage/alice/sim/pythia_gen/files.txt' NFILES=$(wc -l < $FILE_PATHS) diff --git a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/theta_g_gen.sh b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/theta_g_gen.sh index 852dcce1e..e419b0776 100755 --- a/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/theta_g_gen.sh +++ b/pyjetty/alice_analysis/slurm/sbatch/theta_g/gen/theta_g_gen.sh @@ -27,7 +27,7 @@ else fi # Define output path from relevant sub-path of input file -OUTPUT_PREFIX="AnalysisResults/james/$JOB_ID" +OUTPUT_PREFIX="AnalysisResults/theta_g/$JOB_ID" # Note: depends on file structure of input file -- need to edit appropriately for each dataset 
OUTPUT_SUFFIX=$(echo $INPUT_FILE | cut -d/ -f7-8) #echo $OUTPUT_SUFFIX @@ -43,8 +43,8 @@ module load pyjetty/1.0 module list # Run python script via pipenv -cd /home/james/pyjetty/pyjetty/alice_analysis +cd /home/ezra/pyjetty/pyjetty/alice_analysis python process/user/james/process_parton_hadron_theta_g.py -c config/theta_g/pp/james_pp.yaml -f $INPUT_FILE -o $OUTPUT_DIR # Move stdout to appropriate folder -mv /rstorage/alice/AnalysisResults/james/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/james/${JOB_ID} +mv /rstorage/alice/AnalysisResults/theta_g/slurm-${JOB_ID}_${TASK_ID}.out /rstorage/alice/AnalysisResults/theta_g/${JOB_ID} diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim0.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim0.sh index e592f8fa5..204de12c6 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim0.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim0.sh @@ -1,7 +1,7 @@ #! /bin/bash # Script to merge output ROOT files -JOB_ID=287385 +JOB_ID=1191463 OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" # command line arguments diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim1.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim1.sh index d160cbc3d..aee746624 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim1.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_LHC18b8_fastsim1.sh @@ -1,7 +1,7 @@ #! 
/bin/bash # Script to merge output ROOT files -JOB_ID=287386 +JOB_ID=1191464 OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" # command line arguments diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_herwig_embedded_fastsim2.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_herwig_embedded_fastsim2.sh new file mode 100755 index 000000000..14140a433 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_herwig_embedded_fastsim2.sh @@ -0,0 +1,39 @@ +#! /bin/bash + +# Script to merge output ROOT files +JOB_ID=1028400 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +#FILES=$( find ${FILE_DIR_BASE}/520/child_*/TrainOutput/${BIN}/ -name "*.root" ) +FILES=$( find ${FILE_DIR_BASE}/260023/${BIN}/ -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 10 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_jewel_fastsim.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_jewel_fastsim.sh new file mode 100755 index 000000000..ad20a74cc --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_jewel_fastsim.sh @@ -0,0 +1,38 @@ +#! 
/bin/bash + +# Script to merge output ROOT files +JOB_ID=1064591 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +FILES=$( find ${FILE_DIR_BASE}/${BIN}/ -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 10 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_pythia_embedded_fastsim1.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_pythia_embedded_fastsim1.sh new file mode 100755 index 000000000..5a9ea3fac --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/merge_pythia_embedded_fastsim1.sh @@ -0,0 +1,38 @@ +#! 
/bin/bash + +# Script to merge output ROOT files +JOB_ID=1064590 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +FILES=$( find ${FILE_DIR_BASE}/520/child_*/TrainOutput/${BIN}/ -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 10 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim0.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim0.sh index 4659d3a9c..5bf75a86c 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim0.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim0.sh @@ -2,8 +2,8 @@ #SBATCH --job-name=mergepthat #SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 -#SBATCH --partition=std -#SBATCH --time=24:00:00 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 #SBATCH --array=1-20 #SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim1.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim1.sh index 5edec82b7..f6a2a4a0a 100755 --- 
a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim1.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_LHC18b8_fastsim1.sh @@ -2,8 +2,8 @@ #SBATCH --job-name=mergepthat #SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 -#SBATCH --partition=std -#SBATCH --time=24:00:00 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 #SBATCH --array=1-20 #SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_herwig_embedded_fastsim2.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_herwig_embedded_fastsim2.sh new file mode 100755 index 000000000..7910fc1a0 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_herwig_embedded_fastsim2.sh @@ -0,0 +1,10 @@ +#! /bin/bash + +#SBATCH --job-name=mergepthat +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 +#SBATCH --partition=std +#SBATCH --time=24:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_herwig_embedded_fastsim2.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_jewel_fastsim.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_jewel_fastsim.sh new file mode 100755 index 000000000..506a6560e --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_jewel_fastsim.sh @@ -0,0 +1,10 @@ +#! 
/bin/bash + +#SBATCH --job-name=mergepthat +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_jewel_fastsim.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_pythia_embedded_fastsim1.sh b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_pythia_embedded_fastsim1.sh new file mode 100755 index 000000000..b290805bf --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/fastsim/slurm_merge_pythia_embedded_fastsim1.sh @@ -0,0 +1,10 @@ +#! /bin/bash + +#SBATCH --job-name=mergepthat +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_pythia_embedded_fastsim1.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/ang/merge_LHC18b8.sh b/pyjetty/alice_analysis/slurm/utils/ang/merge_LHC18b8.sh index 373f59094..9cd7bae94 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/merge_LHC18b8.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/merge_LHC18b8.sh @@ -1,7 +1,7 @@ #! /bin/bash # Script to merge output ROOT files -JOB_ID=351487 +JOB_ID=1191459 OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" # command line arguments diff --git a/pyjetty/alice_analysis/slurm/utils/ang/merge_LHC20g4.sh b/pyjetty/alice_analysis/slurm/utils/ang/merge_LHC20g4.sh new file mode 100755 index 000000000..943e6709f --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/merge_LHC20g4.sh @@ -0,0 +1,38 @@ +#! 
/bin/bash + +# Script to merge output ROOT files +JOB_ID=1070709 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +FILES=$( find ${FILE_DIR_BASE}/$BIN/*/* -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 10 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/merge_data.sh b/pyjetty/alice_analysis/slurm/utils/ang/merge_data.sh index 84d6b907f..57aeeae98 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/merge_data.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/merge_data.sh @@ -2,10 +2,11 @@ # # Script to merge output ROOT files -JOB_ID=287383 +JOB_ID=1207359 FILE_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" FILES=$( find "$FILE_DIR" -name "*.root" ) +echo "Number of files: $(wc -l $FILES)" OUTPUT_DIR=/rstorage/alice/AnalysisResults/ang/$JOB_ID hadd -f -j 20 $OUTPUT_DIR/AnalysisResultsFinal.root $FILES diff --git a/pyjetty/alice_analysis/slurm/utils/ang/merge_data_LHC18qr.sh b/pyjetty/alice_analysis/slurm/utils/ang/merge_data_LHC18qr.sh new file mode 100755 index 000000000..08f0184e2 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/merge_data_LHC18qr.sh @@ -0,0 +1,26 @@ +#! 
/bin/bash +# +# Script to merge output ROOT files + +JOB_ID=1064588 +OUTPUT_DIR=/rstorage/alice/AnalysisResults/ang/$JOB_ID + +# Merge separate subsets, since otherwise it is too large for hadd +RUNLIST_LHC18q="000296414 000296510 000296379 000296377 000296309 000296433 000296068 000296133 000296423 000296065 000296550 000295588 000295586 000296270" +for RUN in $RUNLIST_LHC18q +do + FILE_DIR=$OUTPUT_DIR/LHC18q/$RUN + FILES=$( find "$FILE_DIR" -name "*.root" ) + hadd -f -j 20 $FILE_DIR/AnalysisResultsIntermediate.root $FILES +done + +RUNLIST_LHC18r="000296894 000297446 000297544 000296899 000297479 000297442 000297415 000296934 000297590 000297380 000297123 000296694 000296903 000297218" +for RUN in $RUNLIST_LHC18r +do + FILE_DIR=$OUTPUT_DIR/LHC18r/$RUN + FILES=$( find "$FILE_DIR" -name "*.root" ) + hadd -f -j 20 $FILE_DIR/AnalysisResultsIntermediate.root $FILES +done + +FILES=$( find $OUTPUT_DIR/LHC18*/ -name "AnalysisResultsIntermediate.root" ) +hadd -f $OUTPUT_DIR/AnalysisResultsFinal.root $FILES diff --git a/pyjetty/alice_analysis/slurm/utils/ang/randmass/merge_LHC18b8_randmass.sh b/pyjetty/alice_analysis/slurm/utils/ang/randmass/merge_LHC18b8_randmass.sh index 3a957e53b..061cdb2eb 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/randmass/merge_LHC18b8_randmass.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/randmass/merge_LHC18b8_randmass.sh @@ -1,7 +1,7 @@ #! /bin/bash # Script to merge output ROOT files -JOB_ID=351488 +JOB_ID=1191466 OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" # command line arguments diff --git a/pyjetty/alice_analysis/slurm/utils/ang/randmass/merge_LHC20g4_randmass.sh b/pyjetty/alice_analysis/slurm/utils/ang/randmass/merge_LHC20g4_randmass.sh new file mode 100755 index 000000000..4c6c360bd --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/randmass/merge_LHC20g4_randmass.sh @@ -0,0 +1,38 @@ +#! 
/bin/bash + +# Script to merge output ROOT files +JOB_ID=1064589 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +FILES=$( find ${FILE_DIR_BASE}/$BIN/*/* -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 10 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/randmass/slurm_merge_LHC18b8_randmass.sh b/pyjetty/alice_analysis/slurm/utils/ang/randmass/slurm_merge_LHC18b8_randmass.sh index c5884e5ad..5f8959edd 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/randmass/slurm_merge_LHC18b8_randmass.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/randmass/slurm_merge_LHC18b8_randmass.sh @@ -2,8 +2,8 @@ #SBATCH --job-name=mergepthat #SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 -#SBATCH --partition=std -#SBATCH --time=24:00:00 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 #SBATCH --array=1-20 #SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out diff --git a/pyjetty/alice_analysis/slurm/utils/ang/randmass/slurm_merge_LHC20g4_randmass.sh b/pyjetty/alice_analysis/slurm/utils/ang/randmass/slurm_merge_LHC20g4_randmass.sh new file mode 100755 index 000000000..2906d021d --- /dev/null +++ 
b/pyjetty/alice_analysis/slurm/utils/ang/randmass/slurm_merge_LHC20g4_randmass.sh @@ -0,0 +1,10 @@ +#! /bin/bash + +#SBATCH --job-name=merge20g4 +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_LHC20g4_randmass.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/ang/scaleHistograms.py b/pyjetty/alice_analysis/slurm/utils/ang/scaleHistograms.py index 961f8de72..a696f47d7 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/scaleHistograms.py +++ b/pyjetty/alice_analysis/slurm/utils/ang/scaleHistograms.py @@ -4,7 +4,8 @@ # all output lists, subject to some simple criteria that covers basic use cases (can be adapted as needed). # # There is an option "bRemoveOutliers" to remove outliers from certain histograms. The features are -# currently hard-coded below so you will need to modify the code as needed. This feature is adapted from code of Raymond Ehlers. +# currently hard-coded below so you will need to modify the code as needed. +# This feature is adapted from code of Raymond Ehlers. 
# # Author: James Mulligan (james.mulligan@berkeley.edu) # @@ -15,23 +16,43 @@ import os import sys import yaml +from array import array # Prevent ROOT from stealing focus when plotting ROOT.gROOT.SetBatch(True) +# Load pyjetty ROOT utils +ROOT.gSystem.Load("libpyjetty_rutil.so") +histutils = ROOT.RUtil.HistUtils() + ################################################################################### # Main function def scaleHistograms(configFile, remove_unscaled): # Option to remove outliers from specified histograms + # Remove outliers that are above some amount of the max of the pT-hat bin + pThatRemoveOutliers = False + pThatMaxMultiplier = 3 + pThatBins = [5, 7, 9, 12, 16, 21, 28, 36, 45, 57, 70, 85, + 99, 115, 132, 150, 169, 190, 212, 235, None] ## PYTHIA + #pThatBins = [7, 9, 12, 16, 21, 28, 36, 45, 57, 70, 85, 99, + # 115, 132, 150, 169, 190, 212, 235, 260, None] ## HERWIG + # Histograms to scale; must have pT on x-axis (or axis 1) + pThatHistogramsToCut = [ + "h_ang_JetPt_", "h_mass_JetPt_", "hResponse_JetPt_", "hResidual_JetPt_", + "hJES_R", "hDeltaR_", "hDeltaObs_", "hDeltaPt_emb_R", "hZ_", "hTrackPt"] + # Dimension in the THn to do the cut + pTdimForTHn = 1 + # If the average bin content stays below the "outlierLimit" for "outlierNBinsThreshold" bins, it is removed bRemoveOutliers = False - outlierLimit = 2 - outlierNBinsThreshold=4 - + bOutlierLimit = 3 + # NOTE: CURRENTLY IGNORING IN "SIMPLE" IMPLEMENTATION!! 
+ outlierNBinsThreshold=2 + # Option to print out detailed info about scaling and outlier removal - verbose = False - + verbose = True + # Read the cross-section, and scale histograms EndPtHardBin = 20 with open(configFile, 'r') as stream: @@ -64,12 +85,28 @@ def scaleHistograms(configFile, remove_unscaled): name = key.GetName() if "Scaled" in name: continue - if "roounfold" in name: + elif "roounfold" in name: + continue + elif "hNevents" in name: continue obj = f.Get(name) + if obj: - ScaleAllHistograms(obj, scaleFactor, f, verbose, bRemoveOutliers, outlierLimit, - outlierNBinsThreshold, i-1, EndPtHardBin, name) + pThatHighEdge = pThatBins[i] + if pThatHighEdge != None: + #if pThatHighEdge < 20: + pThatHighEdge *= pThatMaxMultiplier + #if pThatHighEdge < 40: + # pThatHighEdge = 40 + #else: + # pThatHighEdge *= 2 + # if pThatHighEdge < 60: + # pThatHighEdge = 60 + + ScaleAllHistograms( + obj, scaleFactor, f, verbose, pThatRemoveOutliers, pThatHistogramsToCut, + pThatHighEdge, pTdimForTHn, bRemoveOutliers, + bOutlierLimit, outlierNBinsThreshold, i-1, EndPtHardBin, name) else: print('obj not found!') @@ -81,9 +118,10 @@ def scaleHistograms(configFile, remove_unscaled): ################################################################################### # Function to iterate recursively through an object to scale all TH1/TH2/THnSparse -def ScaleAllHistograms(obj, scaleFactor, f, verbose, bRemoveOutliers=False, limit=2, - nBinsThreshold=4, pTHardBin=0, EndPtHardBin=20, taskName=""): - +def ScaleAllHistograms( + obj, scaleFactor, f, verbose, pThatRemoveOutliers, pThatHistogramsToCut, pThatHighEdge, pTdimForTHn, + bRemoveOutliers=False, bOutlierLimit=2, nBinsThreshold=4, pTHardBin=0, EndPtHardBin=20, taskName=""): + # Set Sumw2 if not already done if obj.InheritsFrom(ROOT.THnBase.Class()): if obj.GetSumw2() is 0: @@ -95,35 +133,105 @@ def ScaleAllHistograms(obj, scaleFactor, f, verbose, bRemoveOutliers=False, limi obj.Sumw2() if verbose: print('Set Sumw2 on %s' % 
obj.GetName()) - + if obj.InheritsFrom(ROOT.TProfile.Class()): if verbose: print("TProfile %s not scaled..." % obj.GetName()) + elif obj.InheritsFrom(ROOT.TH2.Class()): + if pThatRemoveOutliers and pThatHighEdge != None: + for name in pThatHistogramsToCut: + if name in obj.GetName(): + histutils.pThatRemoveOutliers(obj, verbose, pThatHighEdge) + break + if bRemoveOutliers: + histutils.simpleRemoveOutliers(obj, verbose, bOutlierLimit) obj.Scale(scaleFactor) if verbose: print("TH2 %s was scaled..." % obj.GetName()) + elif obj.InheritsFrom(ROOT.TH1.Class()): + if pThatRemoveOutliers and pThatHighEdge != None: + for name in pThatHistogramsToCut: + if name in obj.GetName(): + histutils.pThatRemoveOutliers(obj, verbose, pThatHighEdge) + break if bRemoveOutliers: - name = obj.GetName() + histutils.simpleRemoveOutliers(obj, verbose, bOutlierLimit) + #name = obj.GetName() #only perform outlier removal on these couple histograms - if "Pt" in name: - removeOutliers(pTHardBin, EndPtHardBin, obj, verbose, limit, nBinsThreshold, 1, taskName) + #if "Pt" in name: + # removeOutliers(pTHardBin, EndPtHardBin, obj, verbose, bOutlierLimit, nBinsThreshold, 1, taskName) obj.Scale(scaleFactor) if verbose: print("TH1 %s was scaled..." % obj.GetName()) + elif obj.InheritsFrom(ROOT.THnBase.Class()): + if pThatRemoveOutliers and pThatHighEdge != None: + for name in pThatHistogramsToCut: + if name in obj.GetName(): + histutils.pThatRemoveOutliers(obj, verbose, pThatHighEdge, obj.GetListOfAxes().GetEntries(), pTdimForTHn) + break + if bRemoveOutliers: + histutils.simpleRemoveOutliersTHn(obj, verbose, bOutlierLimit, obj.GetListOfAxes().GetEntries()) obj.Scale(scaleFactor) if verbose: - print("THnSparse %s was scaled..." % obj.GetName()) + print("THn %s was scaled..." 
% obj.GetName()) + else: if verbose: print("Not a histogram!") print(obj.GetName()) for subobj in obj: - ScaleAllHistograms(subobj, scaleFactor, f, verbose, bRemoveOutliers, limit, + ScaleAllHistograms(subobj, scaleFactor, f, verbose, bRemoveOutliers, bOutlierLimit, nBinsThreshold, pTHardBin, taskName) +''' Python implementation too slow -- use C++ side +################################################################################### +# "Simple" remove outliers function +# Just delete any bin contents with N counts < bOutlierLimit +def simpleRemoveOutliers(hist, verbose=False, bOutlierLimit=2): + + if verbose: + print("Applying simple removal of outliers with counts < %i for %s" % (bOutlierLimit, hist.GetName())) + + for i in range(1, hist.GetNcells()+1): + content = hist.GetBinContent(i) + if content < bOutlierLimit: + hist.SetBinContent(i, 0) + hist.SetBinError(i, 0) + +################################################################################### +# "Simple" remove outliers function for THn using recursion +# Just delete any bin contents with N counts < bOutlierLimit +def simpleRemoveOutliersTHn(hist, verbose=False, bOutlierLimit=2, dim=4): + + if verbose: + print("Applying simple removal of outliers with counts < %i for %s" % (bOutlierLimit, hist.GetName())) + + n_bins = [0] * dim + for d in range(dim): + n_bins[d] = hist.GetAxis(d).GetNbins() + x = [] + simpleRemoveOutliersTHn_recurse(hist, bOutlierLimit, dim, n_bins, x) + +################################################################################### +def simpleRemoveOutliersTHn_recurse(hist, bOutlierLimit, dim, n_bins, x): + + dims_decided = len(x) + + if dims_decided == dim: + bin_x = array('i', x) + if hist.GetBinContent(bin_x) < bOutlierLimit: + hist.SetBinContent(bin_x, 0) + hist.SetBinError(bin_x, 0) + return + + for i in range(1, n_bins[dims_decided] + 1): + x_new = x + [i] + simpleRemoveOutliersTHn_recurse(hist, bOutlierLimit, dim, n_bins, x_new) +''' + 
################################################################################### # Function to remove outliers from a TH3 (i.e. truncate the spectrum), based on projecting to the y-axis # It truncates the 3D histogram based on when the 1D projection 4-bin moving average has been above @@ -148,23 +256,23 @@ def removeOutliers(pTHardBin, EndPtHardBin, hist, verbose, limit=2, nBinsThresho if verbose: (preMean, preMedian) = GetHistMeanAndMedian(histToCheck) - + for index in range(0, histToCheck.GetNcells()): if verbose: print("---------") avg = MovingAverage(histToCheck, index = index, numberOfCountsBelowIndex = 2, numberOfCountsAboveIndex = 2) if verbose: print("Index: {0}, Avg: {1}, BinContent: {5}, foundAboveLimit: {2}, cutIndex: {3}, cutLimitReached: {4}".format(index, avg, foundAboveLimit, cutIndex, cutLimitReached, histToCheck.GetBinContent(index))) - if avg > limit: + if not foundAboveLimit and avg > limit: foundAboveLimit = True - + if not cutLimitReached: if foundAboveLimit and avg <= limit: if cutIndex == -1: cutIndex = index nBinsBelowLimitAfterLimit += 1 - - if nBinsBelowLimitAfterLimit != 0 and avg > limit: + + elif nBinsBelowLimitAfterLimit != 0 and avg > limit: # Reset cutIndex = -1 nBinsBelowLimitAfterLimit = 0 @@ -177,7 +285,7 @@ def removeOutliers(pTHardBin, EndPtHardBin, hist, verbose, limit=2, nBinsThresho # the limit and crossing the nBinsThreshold if verbose: print("Hist checked: {0}, cut index: {1}".format(histToCheck.GetName(), cutIndex)) - + # Use on both TH1 and TH2 since we don't start removing immediately, but instead only after the limit if cutLimitReached: if verbose: @@ -247,7 +355,7 @@ def GetHistMeanAndMedian(hist): # Apparently needed to be safe(?) 
hist.ComputeIntegral() hist.GetQuantiles(1, x, q) - + mean = hist.GetMean() return (mean, x.value) @@ -259,7 +367,7 @@ def MovingAverage(hist, index, numberOfCountsBelowIndex = 0, numberOfCountsAbove # Check inputs if numberOfCountsBelowIndex < 0 or numberOfCountsAboveIndex < 0: print("Moving average number of counts above or below must be >= 0. Please check the values!") - + count = 0. average = 0. for i in range(index - numberOfCountsBelowIndex, index + numberOfCountsAboveIndex + 1): @@ -269,7 +377,7 @@ def MovingAverage(hist, index, numberOfCountsBelowIndex = 0, numberOfCountsAbove #print("Adding {}".format(hist.GetBinContent(i))) average += hist.GetBinContent(i) count += 1 - + #if count != (numberOfCountsBelowIndex + numberOfCountsAboveIndex + 1): # print("Count: {}, summed: {}".format(count, (numberOfCountsBelowIndex + numberOfCountsAboveIndex + 1))) #exit(0) @@ -341,7 +449,7 @@ def getRadiusFromlistName(listName): if __name__ == '__main__': print("Executing scaleHistograms.py...") print("") - + # Define arguments parser = argparse.ArgumentParser(description='Plot analysis histograms') parser.add_argument('-c', '--configFile', action='store', @@ -350,14 +458,14 @@ def getRadiusFromlistName(listName): help="Path of config file for analysis") parser.add_argument("-r", "--remove_unscaled", help="Remove unscaled histograms", action="store_true") - + # Parse the arguments args = parser.parse_args() print('Configuring...') print('configFile: \'{0}\''.format(args.configFile)) print('----------------------------------------------------------------') - + # If invalid configFile is given, exit if not os.path.exists(args.configFile): print('File \"{0}\" does not exist! 
Exiting!'.format(args.configFile)) diff --git a/pyjetty/alice_analysis/slurm/utils/ang/slurm_merge_LHC18b8.sh b/pyjetty/alice_analysis/slurm/utils/ang/slurm_merge_LHC18b8.sh index f66007e16..f137f76f4 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/slurm_merge_LHC18b8.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/slurm_merge_LHC18b8.sh @@ -2,8 +2,8 @@ #SBATCH --job-name=mergepthat #SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 -#SBATCH --partition=std -#SBATCH --time=24:00:00 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 #SBATCH --array=1-20 #SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out diff --git a/pyjetty/alice_analysis/slurm/utils/ang/slurm_merge_LHC20g4.sh b/pyjetty/alice_analysis/slurm/utils/ang/slurm_merge_LHC20g4.sh new file mode 100755 index 000000000..e611e1075 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/slurm_merge_LHC20g4.sh @@ -0,0 +1,10 @@ +#! /bin/bash + +#SBATCH --job-name=merge20g4 +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_LHC20g4.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_JETSCAPE_after_scaling.sh b/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_JETSCAPE_after_scaling.sh new file mode 100755 index 000000000..79857d1ee --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_JETSCAPE_after_scaling.sh @@ -0,0 +1,16 @@ +#! 
/bin/bash +# +# Script to merge output ROOT files from all pt-hat bins together + +#SUFFIX="2760_PP_Colorless" +#SUFFIX="2760_PbPb_0-5_0.30_2.0_1" +#SUFFIX="2760_PbPb_5-10_0.30_2.0_1" +SUFFIX="5020_PP_Colorless" +#SUFFIX="5020_PbPb_0-5_0.30_2.0_1" +#SUFFIX="5020_PbPb_5-10_0.30_2.0_1" +#SUFFIX="5020_PbPb_30-40_0.30_2.0_1" +#SUFFIX="5020_PbPb_40-50_0.30_2.0_1" + +OUTPUT_DIR_BASE=/rstorage/jetscape/AnalysisResults/1223149/v3/$SUFFIX + +hadd -f -j 10 $OUTPUT_DIR_BASE/AnalysisResultsFinal.root $OUTPUT_DIR_BASE/Stage0/*/*.root diff --git a/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_JETSCAPE_before_scaling.sh b/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_JETSCAPE_before_scaling.sh new file mode 100755 index 000000000..eec33262c --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_JETSCAPE_before_scaling.sh @@ -0,0 +1,23 @@ +#! /bin/bash + +# Script to merge output ROOT files +#OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/540755/v3/2760_PP_Colorless" +#OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/540755/v3/2760_PbPb_0-5_0.30_2.0_1" +#OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/540755/v3/2760_PbPb_5-10_0.30_2.0_1" +OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/1223149/v3/5020_PP_Colorless" +#OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/1223149/v3/5020_PbPb_0-5_0.30_2.0_1" +#OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/1223149/v3/5020_PbPb_5-10_0.30_2.0_1" +#OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/540755/v3/5020_PbPb_30-40_0.30_2.0_1" +#OUTPUT_DIR_BASE="/rstorage/jetscape/AnalysisResults/540755/v3/5020_PbPb_40-50_0.30_2.0_1" + +# Loop through pt hat bins and merge files from each pt-hat bin +#BINS=(1 2 3 4 5 7 9 11 13 15 17 20 25 30 35 40 45 50 55 60 70 80 90 100 110 120 130 140 150 160 170 180 190 200 210 220 230 240 250 260 270 280 290 300 350 400 450 500 550 600 700 800 900 1000 1380) +BINS=(1 2 3 4 5 7 9 11 13 15 17 20 25 30 35 40 45 50 55 60 70 80 90 100 110 120 130 140 150 160 170 
180 190 200 210 220 230 240 250 260 270 280 290 300 350 400 450 500 550 600 700 800 900 1000 1100 1200 1300 1400 1500 1600 1700 1800 1900 2000 2200 2400 2510) + +for ((i=0; i < ${#BINS[@]}-1; ++i)) +do + mkdir -p ${OUTPUT_DIR_BASE}/Stage0/${i} + + FILES=$( find ${OUTPUT_DIR_BASE}/*/${i} -name "*.root" ) + hadd -f -j 10 ${OUTPUT_DIR_BASE}/Stage0/${i}/AnalysisResults.root $FILES +done diff --git a/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_jewel.sh b/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_jewel.sh new file mode 100755 index 000000000..135846d2a --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/theory/merge_jewel.sh @@ -0,0 +1,38 @@ +#! /bin/bash + +# Script to merge output ROOT files +JOB_ID=1216355 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +FILES=$( find ${FILE_DIR_BASE}/*/$BIN/*/* -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 1 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/theory/scale_JETSCAPE_pt_hat.py b/pyjetty/alice_analysis/slurm/utils/ang/theory/scale_JETSCAPE_pt_hat.py new file mode 100755 index 000000000..7b4caf4fb --- /dev/null +++ 
b/pyjetty/alice_analysis/slurm/utils/ang/theory/scale_JETSCAPE_pt_hat.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +import os +import sys +import yaml + +sys.path.append('../..') +from jetscape_analysis.analysis import scale_histograms + +#suffix = '2760_PP_Colorless' +#suffix = '2760_PbPb_0-5_0.30_2.0_1' +#suffix = '2760_PbPb_5-10_0.30_2.0_1' +#suffix = '5020_PP_Colorless' +#suffix = '5020_PbPb_0-5_0.30_2.0_1' +suffix = '5020_PbPb_5-10_0.30_2.0_1' +#suffix = '5020_PbPb_30-40_0.30_2.0_1' +#suffix = '5020_PbPb_40-50_0.30_2.0_1' + +base_dir = f'/rstorage/jetscape/AnalysisResults/1223149/v3/{suffix}' +pt_hat_dir = f'/rstorage/jetscape/JETSCAPE-AA-events/skim/497764/v3/{suffix}' + +config_file = '../../config/TG3_ezra.yaml' +with open(config_file, 'r') as stream: + config = yaml.safe_load(stream) + pt_hat_bins = config['pt_hat_bins'] + n_pt_hat_bins = len(pt_hat_bins) - 1 + +for i in range(0, n_pt_hat_bins): + + output_dir = os.path.join(base_dir, f'Stage0/{i}') + + # Get pt-hat scale factor from file + pt_hat_min = pt_hat_bins[i] + pt_hat_max = pt_hat_bins[i+1] + pt_hat_filename = os.path.join(pt_hat_dir, f'SigmaHardBin{pt_hat_min}_{pt_hat_max}.out') + + if not os.path.exists(pt_hat_filename): + continue + + with open(pt_hat_filename) as f: + first_line = f.readline() + line = first_line.replace('\t', ' ').strip() + pt_hat_xsec = float(line.split(' ')[0]) + + scale_histograms.scale_histograms(output_dir, i, pt_hat_xsec, bRemoveOutliers=False) diff --git a/pyjetty/alice_analysis/slurm/utils/ang/theory/slurm_merge_jewel.sh b/pyjetty/alice_analysis/slurm/utils/ang/theory/slurm_merge_jewel.sh new file mode 100755 index 000000000..6a66a124c --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/theory/slurm_merge_jewel.sh @@ -0,0 +1,10 @@ +#! 
/bin/bash + +#SBATCH --job-name=merge20g4 +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=1 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_jewel.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/ang/thermal/merge_LHC20g4_thermal.sh b/pyjetty/alice_analysis/slurm/utils/ang/thermal/merge_LHC20g4_thermal.sh new file mode 100755 index 000000000..223a20181 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/thermal/merge_LHC20g4_thermal.sh @@ -0,0 +1,38 @@ +#! /bin/bash + +# Script to merge output ROOT files +JOB_ID=1064587 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +FILES=$( find ${FILE_DIR_BASE}/$BIN/*/* -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 10 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/thermal/slurm_merge_LHC20g4_thermal.sh b/pyjetty/alice_analysis/slurm/utils/ang/thermal/slurm_merge_LHC20g4_thermal.sh new file mode 100755 index 000000000..385b5a355 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/thermal/slurm_merge_LHC20g4_thermal.sh @@ -0,0 +1,10 @@ +#! 
/bin/bash + +#SBATCH --job-name=merge20g4 +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_LHC20g4_thermal.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/ang/treff/merge_LHC18b8_treff.sh b/pyjetty/alice_analysis/slurm/utils/ang/treff/merge_LHC18b8_treff.sh index b8b34637f..df656da2e 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/treff/merge_LHC18b8_treff.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/treff/merge_LHC18b8_treff.sh @@ -1,7 +1,7 @@ #! /bin/bash # Script to merge output ROOT files -JOB_ID=351489 +JOB_ID=1191462 OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" # command line arguments diff --git a/pyjetty/alice_analysis/slurm/utils/ang/treff/merge_LHC20g4_treff.sh b/pyjetty/alice_analysis/slurm/utils/ang/treff/merge_LHC20g4_treff.sh new file mode 100755 index 000000000..aed4d0637 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/treff/merge_LHC20g4_treff.sh @@ -0,0 +1,38 @@ +#! 
/bin/bash + +# Script to merge output ROOT files +JOB_ID=1150066 +OUTPUT_DIR="/rstorage/alice/AnalysisResults/ang/$JOB_ID" + +# command line arguments +if [ "$1" != "" ]; then + MERGE_JOB_ID=$1 + echo "Merge Job ID: $MERGE_JOB_ID" +else + echo "Wrong command line arguments" +fi + +if [ "$2" != "" ]; then + BIN=$2 + echo "Bin: $BIN" +else + echo "Wrong command line arguments" +fi + +# Load modules +module use /home/ezra/heppy/modules +module load heppy/1.0 +module use /home/ezra/pyjetty/modules +module load pyjetty/1.0 +module list + +# Merge all output files from each pt-hat bin +FILE_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +FILES=$( find ${FILE_DIR_BASE}/$BIN/*/* -name "*.root" ) + +OUT_DIR_BASE=/rstorage/alice/AnalysisResults/ang/$JOB_ID +mkdir -p ${OUT_DIR_BASE}/Stage0/${BIN} +hadd -f -j 10 ${OUT_DIR_BASE}/Stage0/${BIN}/AnalysisResults.root $FILES + +# Move stdout to appropriate folder +mv /rstorage/alice/AnalysisResults/ang/slurm-${MERGE_JOB_ID}_${BIN}.out /rstorage/alice/AnalysisResults/ang/${JOB_ID}/ diff --git a/pyjetty/alice_analysis/slurm/utils/ang/treff/slurm_merge_LHC18b8_treff.sh b/pyjetty/alice_analysis/slurm/utils/ang/treff/slurm_merge_LHC18b8_treff.sh index e3783b10c..fef87a72f 100755 --- a/pyjetty/alice_analysis/slurm/utils/ang/treff/slurm_merge_LHC18b8_treff.sh +++ b/pyjetty/alice_analysis/slurm/utils/ang/treff/slurm_merge_LHC18b8_treff.sh @@ -2,8 +2,8 @@ #SBATCH --job-name=mergepthat #SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 -#SBATCH --partition=std -#SBATCH --time=24:00:00 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 #SBATCH --array=1-20 #SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out diff --git a/pyjetty/alice_analysis/slurm/utils/ang/treff/slurm_merge_LHC20g4_treff.sh b/pyjetty/alice_analysis/slurm/utils/ang/treff/slurm_merge_LHC20g4_treff.sh new file mode 100755 index 000000000..3b49ba2cb --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/ang/treff/slurm_merge_LHC20g4_treff.sh @@ -0,0 +1,10 @@ 
+#! /bin/bash + +#SBATCH --job-name=merge20g4 +#SBATCH --nodes=1 --ntasks=1 --cpus-per-task=10 +#SBATCH --partition=quick +#SBATCH --time=2:00:00 +#SBATCH --array=1-20 +#SBATCH --output=/rstorage/alice/AnalysisResults/ang/slurm-%A_%a.out + +srun merge_LHC20g4_treff.sh $SLURM_ARRAY_JOB_ID $SLURM_ARRAY_TASK_ID diff --git a/pyjetty/alice_analysis/slurm/utils/lund/merge_data.sh b/pyjetty/alice_analysis/slurm/utils/lund/merge_data.sh new file mode 100755 index 000000000..4e91ef2e1 --- /dev/null +++ b/pyjetty/alice_analysis/slurm/utils/lund/merge_data.sh @@ -0,0 +1,12 @@ +#! /bin/bash +# +# Script to merge output ROOT files + +JOB_ID=1227697 + +FILE_DIR="/rstorage/alice/AnalysisResults/lund/$JOB_ID" +FILES=$( find "$FILE_DIR" -name "*.root" ) +echo "Number of files: $(wc -l $FILES)" + +OUTPUT_DIR=/rstorage/alice/AnalysisResults/lund/$JOB_ID +hadd -f -j 20 $OUTPUT_DIR/AnalysisResultsFinal.root $FILES diff --git a/pyjetty/alihfjets/dev/hfjet/process/user/hf_EEC/pythia_quark_gluon_ezra.py b/pyjetty/alihfjets/dev/hfjet/process/user/hf_EEC/pythia_quark_gluon_ezra.py new file mode 100755 index 000000000..417a65c5e --- /dev/null +++ b/pyjetty/alihfjets/dev/hfjet/process/user/hf_EEC/pythia_quark_gluon_ezra.py @@ -0,0 +1,1273 @@ + +#!/usr/bin/env python +''' +Script for looking at the quark vs gluon dependence of substructure observables +Author: Ezra Lesser (elesser@berkeley.edu), some come from Beatrice L-G +''' + +from __future__ import print_function + +# Fastjet via python (from external library heppy) +import fastjet as fj +import fjcontrib +import fjext +#import ecorrel + +import ROOT + +import tqdm +import yaml +import copy +import argparse +import os +import array +import numpy as np +from array import array +import math + +from pyjetty.mputils import * +from pyjetty.mputils.mputils import pinfo, pwarning + +from heppy.pythiautils import configuration as pyconf +import pythia8 +import pythiafjext +import pythiaext + +from pyjetty.alice_analysis.process.base import 
process_base + +from enum import Enum +import fjtools + +# Prevent ROOT from stealing focus when plotting +ROOT.gROOT.SetBatch(True) +# Automatically set Sumw2 when creating new histograms +ROOT.TH1.SetDefaultSumw2() +ROOT.TH2.SetDefaultSumw2() + + + +################################################################ +class EMesonDecayChannel(Enum): + kAnyDecay = 0 + kUnknownDecay = 1 #BIT(0) + kDecayD0toKpi = 2 #BIT(1) + kDecayDStartoKpipi = 3 #BIT(2) + +class Promptness(Enum): + kUnknown = 0 + kPrompt = 1 + kNonPrompt = 2 + +################################################################ +class PythiaQuarkGluon(process_base.ProcessBase): + + #--------------------------------------------------------------- + # Constructor + #--------------------------------------------------------------- + def __init__(self, input_file='', config_file='', output_dir='', debug_level=0, args=None, **kwargs): + + super(PythiaQuarkGluon, self).__init__( + input_file, config_file, output_dir, debug_level, **kwargs) + + # Call base class initialization + process_base.ProcessBase.initialize_config(self) + + # Read config file + with open(self.config_file, 'r') as stream: + config = yaml.safe_load(stream) + + if not os.path.exists(self.output_dir): + os.makedirs(self.output_dir) + + self.jetR_list = config["jetR"] + + self.user_seed = args.user_seed + self.nev = args.nev + + self.noMPI = (bool)(1-args.MPIon) + self.noISR = (bool)(1-args.ISRon) + + # self implemented variables to study + self.charmdecaysOFF = (bool)(args.nocharmdecay) #charmdecaysOFF=True(F) when charmdecayon=1(0) + pinfo("charm decay input value", args.nocharmdecay) + self.weighted = (bool)(args.weightON) #weightON=True(F) means turn weights on(off) + self.leading_parton_pt_cut = args.leadingptcut + self.replaceKPpairs = (bool)(args.replaceKP) #replaceKP=True(F) means turn k/pi pairs are('nt) replaced + self.gg2ccbar = (bool)(args.onlygg2ccbar) #gg2ccbar=True means only run gg->ccbar process + self.hardccbar = 
(bool)(args.onlyccbar) #hard2ccbar=True means only run hard->ccbar process + self.Dstar = (bool)(args.DstarON) #Dstar=True means look at D* EEC, should be run with self.replaceKPpairs=True + self.initscat = args.chinitscat #1=hard->ccbar, 2=gg->ccbar, 3=D0->Kpi channel, 4=hard->bbar w/ D0->Kpi + self.D0wDstar = (bool)(args.D0withDstarON) #D0wDstar=True means looking at D-tagged jets including D0 from D* + self.difNorm = (bool)(args.difNorm) #difNorm=True means normalize D* distribution with (D0+D*) jets + self.softpion_action = args.softpion #1 = remove soft pion from D*, 2 = only pair soft pion with charged particles, 3 = only pair soft pion with D0, 4 = pair soft pion w everything + self.use_ptRL = (bool)(args.giveptRL) #1=True=replace RL in THnSparse with pT*RL + self.phimeson = (bool)(args.runphi) #1=don't let phi meson decay and look at its EEC + + # PDG ID values for quarks and gluons + self.quark_pdg_ids = [1, 2, 3, 4, 5, 6, 7, 8, -1, -2, -3, -4, -5, -6, -7, -8] + self.down_pdg_ids = [1, -1] + self.up_pdg_ids = [2, -2] + self.strange_pdg_ids = [3, -3] + self.charm_pdg_ids = [4, -4] + self.gluon_pdg_ids = [9, 21] + self.beauty_pdg_ids = [5, -5] + + # hadron level - LHCb tracking restriction + self.max_eta_hadron = 5 + self.min_eta_hadron = 2 + + self.min_leading_track_pT = config["min_leading_track_pT"] if "min_leading_track_pT" in config else None + + self.pt_bins = array.array('d', list(range(5, 100, 5)) + list(range(100, 210, 10))) + self.obs_bins_ang = np.concatenate((np.linspace(0, 0.009, 10), np.linspace(0.01, 0.1, 19), + np.linspace(0.11, 0.8, 70))) + self.obs_bins_mass = np.concatenate( + (np.array([0, 1]), np.linspace(1.8, 9.8, 41), np.linspace(10, 14.5, 10), + np.linspace(15, 19, 5), np.linspace(20, 60, 9))) + + self.observable_list = config['process_observables'] + self.obs_settings = {} + self.obs_grooming_settings = {} + self.obs_names = {} + for observable in self.observable_list: + + obs_config_dict = config[observable] + obs_config_list = 
[name for name in list(obs_config_dict.keys()) if 'config' in name ] + + obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name ] + pinfo("obs_subconfig_list", obs_subconfig_list) + self.obs_settings[observable] = self.utils.obs_settings(observable, obs_config_dict, obs_subconfig_list) + pinfo("self.obs_settings[observable]", self.obs_settings[observable]) + self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict) + pinfo("self.obs_grooming_settings[observable]", self.obs_grooming_settings[observable]) + + self.obs_names[observable] = obs_config_dict["common_settings"]["xtitle"] + + #--------------------------------------------------------------- + # Main processing function + #--------------------------------------------------------------- + def pythia_quark_gluon(self, args): + + # Create ROOT TTree file for storing raw PYTHIA particle information + outf_path = os.path.join(self.output_dir, args.tree_output_fname) + outf = ROOT.TFile(outf_path, 'recreate') + outf.cd() + + # Initialize response histograms + self.initialize_hist() + + pinfo('user seed for pythia', self.user_seed) #TODO: what does this do?? it doesn't work... 
+# print('user seed for pythia', self.user_seed) + mycfg = ['Random:setSeed=on', 'Random:seed={}'.format(self.user_seed)] + mycfg.append('HadronLevel:all=off') + pinfo("charmdecays value", self.charmdecaysOFF) + if (self.charmdecaysOFF == True and self.replaceKPpairs == False): + pinfo("charm decays turning OFF") + # Mesons + mycfg.append('411:mayDecay = no') # D+ + mycfg.append('421:mayDecay = no') # D0 + mycfg.append('10411:mayDecay = no') # D*0(2400)+ + mycfg.append('10421:mayDecay = no') # D*0(2400)0 + mycfg.append('413:mayDecay = no') # D*(2010)+ + mycfg.append('423:mayDecay = no') # D*(2007)0 + mycfg.append('10413:mayDecay = no') # D1(2420)+ + mycfg.append('10423:mayDecay = no') # D1(2420)0 + mycfg.append('20413:mayDecay = no') # D1(H)+ + mycfg.append('20423:mayDecay = no') # D1(2430)0 + mycfg.append('415:mayDecay = no') # D*2(2460)0 + mycfg.append('425:mayDecay = no') # D*2(2460)0 + mycfg.append('431:mayDecay = no') # D+s + mycfg.append('10431:mayDecay = no') # D*s0(2317)+ + mycfg.append('433:mayDecay = no') # D*s+ + mycfg.append('10433:mayDecay = no') # Ds1(2536)+ + mycfg.append('20433:mayDecay = no') # Ds1(2460)+ + mycfg.append('435:mayDecay = no') # D*s2(2573)+ + + # Baryons + mycfg.append('4122:mayDecay = no') # Lc+ + mycfg.append('4222:mayDecay = no') # Sigmac++ + mycfg.append('4212:mayDecay = no') # Sigmac+ + mycfg.append('4112:mayDecay = no') # Sigmac0 + mycfg.append('4224:mayDecay = no') # Sigma*c++ + mycfg.append('4214:mayDecay = no') # Sigma*c+ + mycfg.append('4114:mayDecay = no') # Sigma*c0 + mycfg.append('4232:mayDecay = no') # Xi+c + mycfg.append('4132:mayDecay = no') # Xi0c + mycfg.append('4322:mayDecay = no') # Xiprimec+ + mycfg.append('4312:mayDecay = no') # Xiprimec0 + mycfg.append('4324:mayDecay = no') # Xi*c+ + mycfg.append('4314:mayDecay = no') # Xi*c0 + mycfg.append('4332:mayDecay = no') # Omega0c + mycfg.append('4334:mayDecay = no') # Omega*c0 + mycfg.append('4412:mayDecay = no') + mycfg.append('4422:mayDecay = no') + 
mycfg.append('4414:mayDecay = no') + mycfg.append('4424:mayDecay = no') + mycfg.append('4432:mayDecay = no') + mycfg.append('4434:mayDecay = no') + mycfg.append('4444:mayDecay = no') + + if (self.initscat == 1): #if (self.hardccbar): + mycfg.append('HardQCD:all = off') + mycfg.append('HardQCD:hardccbar = on') + + elif (self.initscat == 2): #if (self.gg2ccbar): + mycfg.append('HardQCD:all = off') + mycfg.append('HardQCD:gg2ccbar = on') + + elif (self.initscat == 3): # just D0->Kpi + #mycfg.append('HardQCD:all = off') + #mycfg.append('HardQCD:hardccbar = on') + + mycfg.append('421:onMode = off') + mycfg.append('421:onIfMatch = 321 211') + + elif (self.initscat == 4): # hard->bbar with D0 -> (only) Kpi + mycfg.append('HardQCD:all = off') + mycfg.append('HardQCD:hardbbbar = on') + + mycfg.append('421:onMode = off') + mycfg.append('421:onIfMatch = 321 211') + + if (self.phimeson): + pinfo("turning phi's OFF") + mycfg.append('333:mayDecay = no') + # mycfg.append('100333:mayDecay = no') + # mycfg.append('337:mayDecay = no') + + if (self.replaceKPpairs): + if (not (self.Dstar or self.D0wDstar or self.difNorm)): + pinfo("turning D*'s OFF") + mycfg.append('10411:mayDecay = no') + mycfg.append('10421:mayDecay = no') + mycfg.append('413:mayDecay = no') + mycfg.append('423:mayDecay = no') + mycfg.append('10413:mayDecay = no') + mycfg.append('10423:mayDecay = no') + mycfg.append('20413:mayDecay = no') + mycfg.append('20423:mayDecay = no') + mycfg.append('415:mayDecay = no') + mycfg.append('425:mayDecay = no') + mycfg.append('431:mayDecay = no') + mycfg.append('10431:mayDecay = no') + mycfg.append('433:mayDecay = no') + mycfg.append('10433:mayDecay = no') + mycfg.append('20433:mayDecay = no') + mycfg.append('435:mayDecay = no') + + # print the banner first + fj.ClusterSequence.print_banner() + print() + + # ------------------------------- + # Setting MPIs and ISRs + print('Will run no MPI:', self.noMPI) + print('Will run no ISR:', self.noISR) + setattr(args, "py_noMPI", 
self.noMPI) + setattr(args, "py_noISR", self.noISR) + # ------------------------------- + + self.pythia = pyconf.create_and_init_pythia_from_args(args, mycfg) + # print("----------------- PARTICLE DATA INFO HERE -----------------") + # pythia.particleData.listAll() + # print("----------------- PARTICLE DATA INFO END -----------------") + + self.init_jet_tools() + self.calculate_events(self.pythia) + self.pythia.stat() + print() + + self.scale_print_final_info(self.pythia) + + outf.Write() + outf.Close() + + self.save_output_objects() + + #--------------------------------------------------------------- + # Initialize histograms + #--------------------------------------------------------------- + def initialize_hist(self): + + self.make_durations = False # Can slow down batch processing, for tests only + if self.make_durations: + self.jade_durations = [] + self.wta_durations = [] + + self.hNevents = ROOT.TH1I("hNevents", 'Number accepted events (unscaled)', 2, -0.5, 1.5) + self.hD0Nevents = ROOT.TH1I("hD0Nevents", "Total Number of D0 events (unscaled)", 2, -0.5, 1.5) + self.hD0KpiNevents = ROOT.TH1I("hD0KpiNevents", "Number of D0->Kpi events (unscaled)", 2, -0.5, 1.5) + self.hD0KpiNjets = ROOT.TH1I("hD0KpiNjets", "Number of D0->Kpi jets (unscaled)", 2, -0.5, 1.5) #accidentally called "hD0KpiNehD0KpiNjetsvents" + self.hDstarNjets = ROOT.TH1I("hDstarNjets", "Number of D* jets (unscaled)", 2, -0.5, 1.5) + self.hsoftpionpT = ROOT.TH1D("hsoftpionpT", "pT of soft pion from D*", 50, 0, 50) + self.hDeltaR = ROOT.TH1F("hDeltaR", 'Delta R between jet and each parent', 40, 0, 0.4) + + if self.phimeson: + self.hphiNevents = ROOT.TH1I("hphiNevents", "Total Number of phi events (unscaled)", 2, -0.5, 1.5) + self.hphiNjets = ROOT.TH1I("hphiNjets", "Number of phi jets (unscaled)", 2, -0.5, 1.5) + + for jetR in self.jetR_list: + + # Store a list of all the histograms just so that we can rescale them later + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + setattr(self, 
hist_list_name, []) + + R_label = str(jetR).replace('.', '') + 'Scaled' + + for observable in self.observable_list: + + if observable != "mass": + raise ValueError("Observable %s is not implemented in this script" % observable) + + obs_name = self.obs_names[observable] + obs_bins = getattr(self, "obs_bins_" + observable) + # Use more finely binned pT bins for TH2s than for the RMs + pt_bins = array.array('d', list(range(0, 201, 1))) + #rapi_bins = np.linspace(-5,5,201) + + #dim = 4 + #nbins = [len(pt_bins)-1, len(pt_bins)-1, len(rapi_bins)-1, 50] + #min_li = [pt_bins[0], pt_bins[0], rapi_bins[0], obs_bins[0]] + #max_li = [pt_bins[-1], pt_bins[-1], rapi_bins[-1], obs_bins[-1]] + + #nbins = (nbins) + #xmin = (min_li) + #xmax = (max_li) + + #nbins_array = array.array('i', nbins) + #xmin_array = array.array('d', xmin) + #xmax_array = array.array('d', xmax) + + # Loop over subobservable (alpha value) + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + pinfo("all the settings", obs_setting, grooming_setting, obs_label) + + partontypeslist = ["charm"] #, "light", "gluon", "inclusive"] + #if (self.initscat == 4): + # partontypeslist.append("beauty") + + for parton_type in partontypeslist: + + title = [ '#it{p}_{T}^{ch jet}', '#it{p}_{T}^{D^{0}}', obs_name] + + # make TH3D for observable + name = ('h3D_%s_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h3D_%s_JetPt_%s_R%s' % (observable, parton_type, jetR)) + h3D = ROOT.TH3D(name, name, len(pt_bins)-1, pt_bins, len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + h3D.Sumw2() + h3D.GetXaxis().SetTitle(title[0]) + h3D.GetYaxis().SetTitle(title[1]) + h3D.GetZaxis().SetTitle(title[2]) + ''' + for i in range(0, 3): + hsparse.GetAxis(i).SetTitle(title[i]) + if i == 0 or i == 1: + hsparse.SetBinEdges(i, 
pt_bins) + if i == 2: + hsparse.SetBinEdges(i, rapi_bins) + if i == 3: + hsparse.SetBinEdges(i, obs_bins) + ''' + setattr(self, name, h3D) + getattr(self, hist_list_name).append(h3D) + + # SD-with-JADE tagged jets + name = ('h3D_%s_jade_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h3D_%s_jade_JetPt_%s_R%s' % (observable, parton_type, jetR)) + h3D = ROOT.TH3D(name, name, len(pt_bins)-1, pt_bins, len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + h3D.Sumw2() + h3D.GetXaxis().SetTitle(title[0]) + h3D.GetYaxis().SetTitle(title[1]) + h3D.GetZaxis().SetTitle(title[2]) + setattr(self, name, h3D) + getattr(self, hist_list_name).append(h3D) + + # WTA tagged jets + name = ('h3D_%s_wta_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h3D_%s_wta_JetPt_%s_R%s' % (observable, parton_type, jetR)) + h3D = ROOT.TH3D(name, name, len(pt_bins)-1, pt_bins, len(pt_bins)-1, pt_bins, len(obs_bins)-1, obs_bins) + h3D.Sumw2() + h3D.GetXaxis().SetTitle(title[0]) + h3D.GetYaxis().SetTitle(title[1]) + h3D.GetZaxis().SetTitle(title[2]) + setattr(self, name, h3D) + getattr(self, hist_list_name).append(h3D) + + #--------------------------------------------------------------- + # Initiate jet defs, selectors, and sd (if required) + #--------------------------------------------------------------- + def init_jet_tools(self): + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + # set up our jet definition and a jet selector + jet_def = fj.JetDefinition(fj.antikt_algorithm, jetR) + setattr(self, "jet_def_R%s" % jetR_str, jet_def) + + pwarning('eta range for particles after hadronization set to', self.min_eta_hadron, "< eta <", self.max_eta_hadron) + parts_selector_h = fj.SelectorPtMin(0.10) & fj.SelectorAbsEtaMax(self.max_eta_hadron) & fj.SelectorAbsEtaMin(self.min_eta_hadron) + setattr(self, "parts_selector_h", parts_selector_h) + parts_selector_ch = parts_selector_h + setattr(self, 
"parts_selector_ch", parts_selector_ch) + + for jetR in self.jetR_list: + jetR_str = str(jetR).replace('.', '') + + jet_selector = fj.SelectorPtMin(5.0) & fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) + #jet_selector = fj.SelectorPtMin(0.) & fj.SelectorAbsEtaMax(self.max_eta_hadron - jetR) + setattr(self, "jet_selector_R%s" % jetR_str, jet_selector) + + count1 = 0 # Number of partonic parents which match to >1 ch-jets + setattr(self, "count1_R%s" % jetR_str, count1) + count2 = 0 # Number of partonic parents which match to zero ch-jets + setattr(self, "count2_R%s" % jetR_str, count2) + + #--------------------------------------------------------------- + # Calculate events and pass information on to jet finding + #--------------------------------------------------------------- + def calculate_events(self, pythia): + + iev = 0 # Event loop count + + self.parton_counter = 0 + + while iev < self.nev: + if not pythia.next(): + continue + + self.event = pythia.event + + # Check if the event contains desired parton, else continue + desired_pid = [4] #, 5] # charm, bottom quark + desired_parton_found = False + for parton in pythia.event: + if parton.id() in desired_pid: + if (self.min_eta_hadron - 1) <= abs(parton.eta()) <= (self.max_eta_hadron + 1): + desired_parton_found = True + break + if not desired_parton_found: + self.parton_counter += 1 + continue + #is_desired_parton = [abs(particle.id()) in desired_pid for particle in self.event] + #if True not in is_desired_parton: + # continue + + # print(self.event) # to print out a table of the event information + fs_parton_5 = fj.PseudoJet(pythia.event[5].px(), pythia.event[5].py(), pythia.event[5].pz(), pythia.event[5].e()) + fs_parton_6 = fj.PseudoJet(pythia.event[6].px(), pythia.event[6].py(), pythia.event[6].pz(), pythia.event[6].e()) + self.parents = [fs_parton_5, fs_parton_6] # parent partons in dijet + + # Save PDG code of the parent partons + self.parent_ids = [pythia.event[5].id(), pythia.event[6].id()] + + # 
parton level + parts_pythia_p = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + + # Get hadron-level event + require_pid = 421 # D0 particle ID + daughters_size = -1 # require certain number of daughters (disabled = -1) + if self.Dstar: + require_pid = 413 + daughters_size = 2 + elif self.replaceKPpairs: # D0 or D0-with-D* + daughters_size = 2 + elif self.phimeson: + require_pid = 333 # phi meson particle ID + + # If successful, returns index of the satisfying particle in the event, else -1 + satisfier_ip = pythiafjext.update_hadronization(pythia, require_pid, daughters_size) + if satisfier_ip == -1: + continue + + # full-hadron level + if ( self.replaceKPpairs == False ): + parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True) + # print("There are ", len(old_pythia_hch), "in oph, and ", len(phis_pythia_hch), " in phis", len(parts_pythia_hch), "> + else: #replace D0->Kpi + if ( self.softpion_action != 1): + parts_pythia_h = pythiafjext.vectorize_select_replaceD0(pythia, [pythiafjext.kFinal], 0, True) + else: + parts_pythia_h = pythiafjext.vectorize_select_replaceD0(pythia, [pythiafjext.kFinal], 0, True, True) + + # print("!! 
pythia hadron (before vectorization) event size is ", pythia.event.size()) + # eventcounter = 0 + # for event in pythia.event: + # # if event.id() == 111 or event.id() == 211 or event.id() == -211: #pi0 or pi+ or pi- + # # print(eventcounter, "pion with event id", event.id()) + # eventcounter+=1 + + #testing + # parts_pythia_hch_noreplace = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) + # parts_pythia_hch_replaced = pythiafjext.vectorize_select_replaceD0(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) + + # charged-hadron level + if ( self.replaceKPpairs == False ): + if ( self.phimeson ): + old_pythia_hch = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) + phis_pythia_hch = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kPhi], 0, True) + parts_pythia_hch = pythiafjext.add_vectors(old_pythia_hch, phis_pythia_hch) + # print("There are ", len(old_pythia_hch), "in oph, and ", len(phis_pythia_hch), " in phis", len(parts_pythia_hch), " in parts") + else: + parts_pythia_hch = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) + else: #replace D0->Kpi + if ( self.softpion_action != 1): + parts_pythia_hch = pythiafjext.vectorize_select_replaceD0(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True) + else: + parts_pythia_hch = pythiafjext.vectorize_select_replaceD0(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True, True) + # print("Size of 1 vector", len(parts_pythia_hch_noreplace)) + # print("Size of 2 vector", len(parts_pythia_hch_replaced)) + # print("Size of new vector", len(parts_pythia_hch)) + + # look at events in charged hadron?? + # print("!! pythia hadron (after vectorization) event size is ", pythia.event.size()) + #TODO: move this block above choosing final state charged particles?? 
+ particlecounter = 0 + D0found = False + D0Kpidecayfound = False + phifound = False + self.DstarKpipidecayfound = False + #for ip in range(satisifer_ip, len(self.event)): + particle = self.event[satisfier_ip] + if abs(particle.id()) == 421 and self.min_eta_hadron <= abs(particle.eta()) <= self.max_eta_hadron: #D0 + D0found = True + decayChannel = self.checkDecayChannel(particle, self.event) + if decayChannel == EMesonDecayChannel.kDecayD0toKpi: + #print("D0 eta:", particle.eta(), "// D0 pT:", + # math.sqrt(particle.px()*particle.px() + particle.py()*particle.py())) + D0Kpidecayfound = True + if decayChannel == EMesonDecayChannel.kDecayDStartoKpipi: + self.DstarKpipidecayfound = True + elif abs(particle.id()) == 413: # D* + self.DstarKpipidecayfound = True + elif abs(particle.id()) == 333: # phi + phifound = True + + + #if D0->Kpi found, count the events; if not, check that length of charged final state hadrons vector is 0 + if (D0Kpidecayfound): + self.hD0KpiNevents.Fill(0) + if (D0found): + self.hD0Nevents.Fill(0) + if (self.phimeson and phifound): + self.hphiNevents.Fill(0) + + # Some "accepted" events don't survive hadronization step -- keep track here + self.hNevents.Fill(0) + self.find_jets_fill_histograms(parts_pythia_h, parts_pythia_hch, iev, D0Kpidecayfound) + + if (iev%100 == 0): + print("Event", iev) + # print("Event", iev) + + iev += 1 + + #--------------------------------------------------------------- + # Find primordial parent + #--------------------------------------------------------------- + def primordial_parent(self,p): + parent1 = parent2 = -10 + while p > 6: + parent1 = self.event[p].mother1() + parent2 = self.event[p].mother2() + if parent1 != parent2: + p = max(parent1,parent2) + else: + p = parent1 + return p + + # trk_thrd default set 0, meaning all tracks would pass + def checkIfPartInJetConst(self, jet_const_arr, pythia_particle_index, trk_thrd=0): + in_jet = False + for c in jet_const_arr: + # print("jet const user index", 
c.user_index(), pythiafjext.getPythia8Particle(c).name()) + if (c.user_index() == pythia_particle_index and c.pt() >= trk_thrd): + in_jet = True + # print("ifpartinjet", c.user_index(), pythia_particle_index) + break + return in_jet + + #--------------------------------------------------------------- + # Find jets, do matching between levels, and fill histograms + #--------------------------------------------------------------- + def find_jets_fill_histograms(self, parts_pythia_h, parts_pythia_hch, iev, D0Kpidecayfound): + + # Don't waste time if there are no D0 mesons + if not D0Kpidecayfound: + return + + parts_selector_h = getattr(self, "parts_selector_h") + parts_selector_ch = getattr(self, "parts_selector_ch") + + # Loop over jet radii + for jetR in self.jetR_list: + + jetR_str = str(jetR).replace('.', '') + jet_selector = getattr(self, "jet_selector_R%s" % jetR_str) + jet_def = getattr(self, "jet_def_R%s" % jetR_str) + + count1 = getattr(self, "count1_R%s" % jetR_str) + count2 = getattr(self, "count2_R%s" % jetR_str) + + # Get the jets at different levels + #jets_p = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_p ))) # parton level + #jets_h = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_h ))) # full hadron level + if (not self.replaceKPpairs and not self.phimeson): + jets_h = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_h))) + jets_ch = fj.sorted_by_pt(jet_selector(jet_def(parts_pythia_hch))) # charged hadron level + else: + jets_h = fj.sorted_by_pt(jet_selector(jet_def(parts_selector_h(parts_pythia_h)))) + jets_ch = fj.sorted_by_pt(jet_selector(jet_def(parts_selector_ch(parts_pythia_hch)))) # charged hadron level + # print("!! 
length of jets_ch", len(jets_ch)) + + R_label = str(jetR).replace('.', '') + 'Scaled' + + ''' Matching jet to parent parton + # Find the charged jet closest to the axis of the original parton + # Require that the match is within some small angle, and that it is unique + jet_matching_distance = 0.6 # Match jets with deltaR < jet_matching_distance*jetR + self.parent0match, self.parent1match = None, None + anothacounter=0 + # print("LOOPING OVER JETS") + for i_jch, jch in enumerate(jets_ch): + # print(i_jch) + # Do constituent pT cut +# pinfo("self.min_leading_track_pT", self.min_leading_track_pT) +# if self.min_leading_track_pT and not \ +# self.utils.is_truth_jet_accepted(jch): +# # self.utils.is_truth_jet_accepted(jch, self.min_leading_track_pT): +# continue + # print("PARENTS:",self.parents) + for i_parent, parent in enumerate(self.parents): + anothacounter+=1 + parentmatch_name = "parent%imatch" % i_parent + # print("CHECKING PARENT", i_parent) + # print("DELTA R TO JET:", jch.delta_R(parent)) + #plot + self.hDeltaR.Fill(jch.delta_R(parent)) + if jch.delta_R(parent) < jet_matching_distance * jetR: + match = getattr(self, parentmatch_name) + # print("MATCH FOR",i_parent,":",match) + if not match: + setattr(self, parentmatch_name, jch) + # print("MATCH SET TO JET WITH pT", jch.pt()) + else: # Already found a match + # Set flag value so that we know to ignore this one + # print("already found a match flagged") + setattr(self, parentmatch_name, 0) + # print(i_jch, "anothacounter", anothacounter) + + # print("event num", iev) + # print("Cehckpoint 1") + + # If we have matches, fill histograms + for i_parent, parent in enumerate(self.parents): + # print("in nexr loop") + jet = getattr(self, "parent%imatch" % i_parent) + # print(jet) + if not jet: + # pinfo("in not jet") + if jet == 0: # More than one match -- take note and continue + # print("case 1") + count1 += 1 + continue + else: # jet == None + # No matches -- take note and continue + # print("case 2") + count2 += 
1 + continue + + # print("CHECKPOINT 2") + + # One unique match + # Identify the histograms which need to be filled +# pinfo("passed not jet") + parton_id = self.parent_ids[i_parent] + # print("parton_id is ", parton_id) + parton_types = [] + if parton_id in self.quark_pdg_ids: + # parton_types += ["quark"] + if parton_id in self.charm_pdg_ids: + parton_types += ["charm"] + elif parton_id in self.up_pdg_ids or parton_id in self.down_pdg_ids or parton_id in self.strange_pdg_ids: + parton_types += ["light"] + elif (parton_id in self.beauty_pdg_ids and self.initscat == 4): + parton_types += ["beauty"] + elif parton_id in self.gluon_pdg_ids: + parton_types += ["gluon"] + if self.phimeson: + parton_types += ["inclusive"] + + # If parent parton not identified, skip for now + if not len(parton_types): + continue + + # print(D0Kpidecayfound) + if D0Kpidecayfound: + print("parton types", parton_types) + ''' + + for i_jh, jet in enumerate(jets_h): + + # Select for just D0-tagged jets #TODO: check if this D0 goes to kaon pion?? 
+ D0taggedjet = False + N_D0 = 0 + Dstartaggedjet = False + if ( self.replaceKPpairs ): + # print("There are ", len(jet.constituents()), "constituents.") + for c in jet.constituents(): + constituent_pdg_idabs = pythiafjext.getPythia8Particle(c).idAbs() + constituent_pdg_index = c.user_index() + if (constituent_pdg_idabs == 421): + decayChannel = self.checkDecayChannel(pythiafjext.getPythia8Particle(c), self.event) + if (decayChannel == EMesonDecayChannel.kDecayD0toKpi): + self.getD0Info(pythiafjext.getPythia8Particle(c)) + N_D0 += 1 + D0taggedjet = True + #break + elif (decayChannel == EMesonDecayChannel.kDecayDStartoKpipi): + self.getD0Info(pythiafjext.getPythia8Particle(c)) + Dstartaggedjet = True + + # save soft pion info from D* if needed + if ( self.softpion_action >= 2 ): #(self.Dstar): + # get the soft pion + if (self.checkD0motherIsDstar(self.D0particleinfo, self.event)): + # print("mother is in fact a Dstar") + softpion_index = self.getSoftPion(self.D0particleinfo, self.event, jet.constituents()) + # print("the soft pion index is", softpion_index) + if softpion_index == -1: + self.softpion_particleinfo_psjet = None + else: + self.softpion_particleinfo_psjet = self.getParticleAsPseudojet(self.event[softpion_index]) + softpion_pt = self.event[softpion_index].pT() + self.hsoftpionpT.Fill(softpion_pt) + self.D0particleinfo_psjet = self.getParticleAsPseudojet(self.event[constituent_pdg_index])#self.D0particleinfo) + + break + + # Skip jets that are not dtagged or dstar tagged + if not (D0taggedjet or Dstartaggedjet): + #print("continuing......") + continue + elif N_D0 > 1: + print("Found %i D0 particles in this jet, continuing" % N_D0) + continue + + # Select D* jets when required + if ( self.difNorm == False ): + if ( not self.Dstar and not self.D0wDstar ): + if ( not D0taggedjet ): #if not a D0 tagged jet, move to next jet + print("Dstar is false, D0wDstar is false, and this is not D0tagged jet") + continue + if ( self.Dstar and not Dstartaggedjet ): #if only 
looking at D*s and D* is not tagged, move to next jet + # print("Dstar is true and Dstar is not tagged") + continue + + # check if prompt or nonprompt - NOT for the Dstars - TODO: CHECK THIS LATER + # if (not self.Dstar): + # self.promptness = self.checkPrompt(pythiafjext.getPythia8Particle(c), self.event) + # print("prompt???", self.promptness) + + phitaggedjet = False + # print("There are ", len(jet.constituents()), "constituents.") + if (self.phimeson): + for c in jet.constituents(): + constituent_pdg_idabs = pythiafjext.getPythia8Particle(c).idAbs() + constituent_pdg_index = c.user_index() + # print("const index from pythiafjext", pythiafjext.getPythia8Particle(c).index(), constituent_pdg_idabs, constituent_pdg_index) + # print("const user_index from pythiafjext", c.user_index()) + if (constituent_pdg_idabs == 333): #TODO: this is assuming there is only one phi per jet! + print("phi jet!") + self.getD0Info(pythiafjext.getPythia8Particle(c)) #naming here is bad but just wanted to reuse the variable + phitaggedjet = True + break + + # move on if this jet doesn't have a phi meson + if ( not phitaggedjet ): + # print("Not a phi jet") + continue + + # count the number of D0-tagged jets + if (D0taggedjet): #D0Kpidecayfound): + self.hD0KpiNjets.Fill(0) + if (Dstartaggedjet): #self.DstarKpipidecayfound): + self.hDstarNjets.Fill(0) + if (self.phimeson and phitaggedjet): + self.hphiNjets.Fill(0) + + if not (jet.has_constituents() or jet.has_structure()): + continue + + ############################################################### + # Check for tagging using various algorithms + ############################################################## + + # JADE + start_time_jade = 0 + if self.make_durations: + start_time_jade = time.time() + jade_tagged = False + JADE_SD_BETA = 1; JADE_SD_ZCUT = 0.1 + #reclusterer_jade = fj.JadePlugin() # Python can't find this for some reason + #jet_def_jade = fj.JetDefinition(reclusterer_jade) + jet_def_jade = fjext.jet_def_jade() + gshop_jade 
= fjcontrib.GroomerShop(jet, jet_def_jade) + ld_jade = gshop_jade.soft_drop(JADE_SD_BETA, JADE_SD_ZCUT) + jet_gr_jade = ld_jade.pair() + #n_HF_meson_found = 0 + if jet_gr_jade.has_constituents() and jet_gr_jade.has_structure(): + # Look for a D0 inside the jet + for constit in jet_gr_jade.constituents(): + if pythiafjext.getPythia8Particle(constit).idAbs() == 421: + #n_HF_meson_found += 1 + jade_tagged = True + break; + #if n_HF_meson_found == 1: + # jade_tagged = True + if self.make_durations: + self.jade_durations.append(time.time() - start_time_jade) + + # WTA RECLUSTERING + start_time_wta = 0 + if self.make_durations: + start_time_wta = time.time() + wta_tagged = False + # fastjet::max_allowable_R == 1000.0 + jet_def_wta = fj.JetDefinition(fj.cambridge_algorithm, 1000.0) + jet_def_wta.set_recombination_scheme(fj.WTA_pt_scheme) + recluster_wta = fjcontrib.Recluster(jet_def_wta) + jet_wta = recluster_wta.result(jet) + if jet_wta.has_constituents(): + # Loop through constituents to find the one aligned with WTA axis + for constit in jet_wta.constituents(): + if constit.delta_R(jet_wta) < 1e-8: + # Particle found + if pythiafjext.getPythia8Particle(constit).idAbs() == 421: + wta_tagged = True + break + if self.make_durations: + self.wta_durations.append(time.time() - start_time_wta) + + ################################################################# + + # Fill histograms + for observable in self.observable_list: + #pinfo("len(self.obs_settings[observable])", len(self.obs_settings[observable])) + for i in range(len(self.obs_settings[observable])): + + obs_setting = self.obs_settings[observable][i] + grooming_setting = self.obs_grooming_settings[observable][i] + obs_label = self.utils.obs_label(obs_setting, grooming_setting) + + # Groom jet, if applicable + jet_groomed_lund = None + if grooming_setting: + gshop = fjcontrib.GroomerShop(jet, jetR, self.reclustering_algorithm) + jet_groomed_lund = self.utils.groom(gshop, grooming_setting, jetR) + if not 
jet_groomed_lund: + continue + + # Apply cut on leading track pT + if self.leading_parton_pt_cut: + leading_parton = fj.sorted_by_pt(jet.constituents())[0] + if (leading_parton.pt() < self.leading_parton_pt_cut): + continue + + # skip filling the pair level information if necessary + if (self.difNorm): + if ( not self.Dstar and not self.D0wDstar ): + if ( Dstartaggedjet ): + continue + elif ( self.Dstar ): + if ( D0taggedjet ): + continue + + if (self.softpion_action >= 2 and Dstartaggedjet): + if (softpion_index == -1): #skip because soft pion is not in the jet! + continue + + obs = self.calculate_observable( + observable, jet, jet_groomed_lund, jetR, obs_setting, + grooming_setting, obs_label, jet.pt()) + + for parton_type in ["charm"]: # parton_types: + #fill parton hnsparse info + if (self.replaceKPpairs or self.phimeson): # phimeson has bad naming convention but is properly filled here + D0_px = self.D0particleinfo.px() + D0_py = self.D0particleinfo.py() + D0_pt = math.sqrt(D0_px * D0_px + D0_py * D0_py) + else: + D0_pt = -1 + + # Traditional anti-kT jet + getattr(self, ('h3D_%s_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h3D_%s_JetPt_%s_R%s' % (observable, parton_type, jetR))).Fill( + jet.pt(), D0_pt, obs) + + # Custom tagged jets + if jade_tagged: + getattr(self, ('h3D_%s_jade_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h3D_%s_jade_JetPt_%s_R%s' % (observable, parton_type, jetR))).Fill( + jet.pt(), D0_pt, obs) + + if wta_tagged: + getattr(self, ('h3D_%s_wta_JetPt_%s_R%s_%s' % (observable, parton_type, jetR, obs_label)) if \ + len(obs_label) else ('h3D_%s_wta_JetPt_%s_R%s' % (observable, parton_type, jetR))).Fill( + jet.pt(), D0_pt, obs) + + setattr(self, "count1_R%s" % jetR_str, count1) + setattr(self, "count2_R%s" % jetR_str, count2) + + #--------------------------------------------------------------- + # Calculate the observable given a jet + 
#--------------------------------------------------------------- + def calculate_observable(self, observable, jet, jet_groomed_lund, + jetR, obs_setting, grooming_setting, obs_label, jet_pt_ungroomed): + + ''' + if observable == "EEC": + + # idk about this grooming setting.... + # WRITE STUFF HERE + + #TODO: get the dcand from the jet! (need the djmm??) + #j = jet[0] + #dcand = djmm.get_Dcand_in_jet(jet) + + # Extract information for EEC + constituents = fj.sorted_by_pt(jet.constituents()) + c_select = fj.vectorPJ() + trk_thrd = 1 # track pt threshold + + # apply pT threshold on jet constituents + for c in constituents: + if c.pt() < trk_thrd: + break + #print("constituent used for pair =", c) + c_select.append(c) + + dphi_cut = -9999 + deta_cut = -9999 + + # print("The jet constit being sent in are ") + # for c_sel in c_select: + # print("index", pythiafjext.getPythia8Particle(c_sel).index(), "id", pythiafjext.getPythia8Particle(c_sel).id(), "userindex", c_sel.user_index()) + #new_corr = ecorrel.CorrelatorBuilder(c_select, dcand, jet_pt_ungroomed, 2, 1, dphi_cut, deta_cut) #jet, D, scale, max, power, dphicut, detacut + if (self.softpion_action == 2): + new_corr = ecorrel.CorrelatorBuilder(c_select, self.D0particleinfo_psjet, self.softpion_particleinfo_psjet, jet.perp(), 2, 1, dphi_cut, deta_cut, False) + elif (self.softpion_action == 3): + new_corr = ecorrel.CorrelatorBuilder(self.D0particleinfo_psjet, self.softpion_particleinfo_psjet, jet.perp(), 2, 1, dphi_cut, deta_cut, True) + elif (self.softpion_action == 4): + # this later can be combined w softpion_action=2 + new_corr = ecorrel.CorrelatorBuilder(c_select, self.D0particleinfo_psjet, self.softpion_particleinfo_psjet, jet.perp(), 2, 1, dphi_cut, deta_cut, True) + else: + new_corr = ecorrel.CorrelatorBuilder(c_select, jet.perp(), 2, 1, dphi_cut, deta_cut) + + #print out weights here + # for index in range(new_corr.correlator(2).rs().size()): + # print("weight", new_corr.correlator(2).weights()[index]) + + 
return new_corr #new_corr.correlator(2).rs()[index] + + #return fjext.lambda_beta_kappa(jet, jet_groomed_lund.pair(), obs_setting, 1, jetR) \ + # if grooming_setting else fjext.lambda_beta_kappa(jet, obs_setting, 1, jetR) + ''' + + if observable == "mass": + + if grooming_setting: + j_groomed = jet_groomed_lund.pair() + if not j_groomed.has_constituents(): + # Untagged jet -- record underflow value + return -1 + else: + return j_groomed.m() + + return jet.m() + + # Should not be any other observable + raise ValueError("Observable %s not implemented" % observable) + + + def checkDecayChannel(self, particle, event): #(part, mcArray): # what type is part + + if(not event): + return EMesonDecayChannel.kUnknownDecay + + decay = EMesonDecayChannel.kUnknownDecay + + absPdgPart = particle.idAbs() + + if(len(particle.daughterList()) == 2): + d1_index = particle.daughterList()[0] #don't use daughter1() and daughter(2) + d2_index = particle.daughterList()[1] + d1 = event[d1_index] + d2 = event[d2_index] + + if(not d1 or not d2): + return decay + + # print("checkpoint 3") + + + absPdg1 = d1.idAbs() + absPdg2 = d2.idAbs() + + if(absPdgPart == 421): # D0 -> K pi + if((absPdg1 == 211 and absPdg2 == 321) or (absPdg1 == 321 and absPdg2 == 211)): # pi K or K pi - QUESTION: does this account for k and pi being opposite signs? + decay = EMesonDecayChannel.kDecayD0toKpi + + # TODO: can insert if (self.Dstar) later + + # Look at D0's mother particles + # print("current particle ID is", absPdgPart) + mother_indices = particle.motherList() + if (len(mother_indices) != 1): + return decay #just return D0->Kpi because D0 didn't come from a D* + # print("MOTHERS", len(mother_indices)) # there's a lot of these... 
+ # print(mother_indices) + for mother_index in mother_indices: + mother = event[mother_index] + absPdg_mother = mother.idAbs() + + if (absPdg_mother == 413): # if mother is D*+/- + # if (len(mother_indices != 1)): + # print("There were", len(mother_indices), "mothers in this event!") + # look at daughters of mother + if(len(mother.daughterList()) == 2): + d1_index = mother.daughterList()[0] #don't use daughter1() and daughter(2) + d2_index = mother.daughterList()[1] + d1 = event[d1_index] + d2 = event[d2_index] + if(not d1 or not d2): + return decay + absPdg1 = d1.idAbs() + absPdg2 = d2.idAbs() + + if((absPdg1 == 421 and absPdg2 == 211) or (absPdg1 == 211 and absPdg2 == 421)): # D0 pi or pi D0 + decay = EMesonDecayChannel.kDecayDStartoKpipi + break #TODO: should this break be earlier? is it possible to have multiple mothers that are D*? + + # print(event) + + return decay + + + # save D0 particle info to save to THnSparse + def getD0Info(self, particle): + self.D0particleinfo = particle + return + + + def getParticleAsPseudojet(self, particle): + psjet = fj.PseudoJet(particle.px(), particle.py(), particle.pz(), particle.e()) + + psjet.set_user_index(particle.index()) #should be + user_index_offset but that is 0 + # _pinfo = PythiaParticleInfo(pythia.event[particle.index()]) + # psjet.set_user_info(_pinfo) + + return psjet + + # check if D0 is prompt - should only send D0 that does not come from D* here + # also assuming that D0's mother is c (direct mother, not with other generations in between) + def checkPrompt(self, D0particle, event): + + promptness = Promptness.kUnknown + + absPdgPart = D0particle.idAbs() + motherlist_indices = D0particle.motherList() + # if (len(motherlist_indices) != 1): + # return + print("D0's mothers", motherlist_indices) + for mother_index in motherlist_indices: + mother = event[mother_index] + absPdg_mother = mother.idAbs() + print("D0 mother ID", absPdg_mother) + + if (absPdg_mother == 4): #charm + # check if mother of charm is beauty 
+ charms_mother_indices = mother.motherList() + print("charm's mothers", charms_mother_indices) + + # if there are no mothers??? + if len(charms_mother_indices) == 0: + promptness = Promptness.kPrompt + break + + for charms_mother_index in charms_mother_indices: + charms_mother = event[charms_mother_index] + absPdg_charms_mother = charms_mother.idAbs() + print("charm mother ID", absPdg_charms_mother) + + if (absPdg_charms_mother == 4): #charm + promptness = Promptness.kPrompt + break + if (absPdg_charms_mother == 5): #beauty + promptness = Promptness.kNonPrompt + break + #else: would be unknown (if c's parentage is something but not a b...??) + break + + return promptness + + def printD0mothers(self, particle, event, num): + # if num == 10: #break statement + # print("Exited with num=10 ") + # return + + print("This is generation", num) + + motherlist_indices = particle.motherList() + motherlist = [event[i].name() for i in motherlist_indices] + motherlist_status = [event[i].status() for i in motherlist_indices] + print("The indices are", motherlist_indices) + print("The mothers are", motherlist) + print("The statuss are", motherlist_status) + + if len(motherlist_indices) == 0: + return + + for mother_index in motherlist_indices: + + # if mother_index < 5: #break statement + # print("Exited with mother_index of ", mother_index) + # break + + mother = event[mother_index] + print("Following mother ", mother.name(), "with index", mother_index) + self.printD0mothers(mother, event, num+1) + + # check if D0's mother is D* + def checkD0motherIsDstar(self, D0particle, event): + motherisDstar = False + + if (D0particle.idAbs() == 421): #D0 + + mother_indices = D0particle.motherList() + if len(mother_indices) == 1: # assuming D* is the only mother to D0 + mo1 = mother_indices[0] + if event[mo1].idAbs() == 413: #D* + motherisDstar = True + + # std::cout << "is mother a Dstar? 
" << motherisDstar << std::endl; + return motherisDstar + + def getSoftPion(self, D0particle, event, jet_const_arr): + softpion_index = -1 + + Dstar_index = D0particle.motherList()[0] + poss_softpion_indices = event[Dstar_index].daughterList() + #TODO: check if there are only two daughters?? + for daughter_index in poss_softpion_indices: + poss_softpion_idAbs = event[daughter_index].idAbs() + if poss_softpion_idAbs == 211: + softpion_index = daughter_index + + # also check that the pion is in the jet constituents + # print("softpion index", softpion_index) + if len(jet_const_arr) > 0: + if (self.checkIfPartInJetConst(jet_const_arr, softpion_index, 1) == False): + softpion_index = -1 + # print(" softpion index, softpion not in jet", softpion_index) + + return softpion_index + + #--------------------------------------------------------------- + # Initiate scaling of all histograms and print final simulation info + #--------------------------------------------------------------- + def scale_print_final_info(self, pythia): + # Scale all jet histograms by the appropriate factor from generated cross section and the number of accepted events + scale_f = pythia.info.sigmaGen() / self.hNevents.GetBinContent(1) + print("pythia.info.sigmaGen() is", pythia.info.sigmaGen()) + print("scale_f is", scale_f) + #print("int(pythia.info.nAccepted())", int(pythia.info.nAccepted())) + + for jetR in self.jetR_list: + hist_list_name = "hist_list_R%s" % str(jetR).replace('.', '') + # print(hist_list_name) + for h in getattr(self, hist_list_name): + # if 'jetlevel' in h.GetTitle(): + # continue + h.Scale(scale_f) + + N_rejected_hadron = int(pythia.info.nAccepted() - self.hNevents.GetBinContent(1)) + print("N total final events:", int(self.hNevents.GetBinContent(1)), "with", + self.parton_counter, "events rejected at parton selection and", + N_rejected_hadron, "events rejected at hadronization step") + self.hNevents.SetBinError(1, 0) + self.hD0Nevents.SetBinError(1, 0) + 
self.hD0KpiNevents.SetBinError(1, 0) + self.hD0KpiNjets.SetBinError(1, 0) + self.hDstarNjets.SetBinError(1, 0) + + if self.phimeson: + self.hphiNevents.SetBinError(1, 0) + self.hphiNjets.SetBinError(1, 0) + + if self.make_durations: + # Print information about the durations + print("Average time on SD+JADE tagging: %f milliseconds per jet" % ( + sum(self.jade_durations) / len(self.jade_durations) * 1000)) + print("Average time on WTA tagging: %f milliseconds per jet" % ( + sum(self.wta_durations) / len(self.wta_durations) * 1000)) + +################################################################ +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly', + prog=os.path.basename(__file__)) + pyconf.add_standard_pythia_args(parser) + # Could use --py-seed + parser.add_argument('--user-seed', help='PYTHIA starting seed', default=1111, type=int) + parser.add_argument('-o', '--output-dir', action='store', type=str, default='./', + help='Output directory for generated ROOT file(s)') + parser.add_argument('--tree-output-fname', default="AnalysisResults.root", type=str, + help="Filename for the (unscaled) generated particle ROOT TTree") + parser.add_argument('--MPIon', action='store', type=int, default=1, + help="MPI on or off") + parser.add_argument('--ISRon', action='store', type=int, default=1, + help="ISR on or off") + parser.add_argument('-c', '--config_file', action='store', type=str, default='config/angularity.yaml', + help="Path of config file for observable configurations") + parser.add_argument('--nocharmdecay', action='store', type=int, default=0, help="'1' turns charm decays off") + parser.add_argument('--weightON', action='store', type=int, default=0, help="'1' turns weights on") + parser.add_argument('--leadingptcut', action='store', type=float, default=0, help="leading track pt cut") + parser.add_argument('--replaceKP', action='store', type=int, default=0, help="'1' replaces the K/pi pairs with D0") + 
parser.add_argument('--onlygg2ccbar', action='store', type=int, default=0, help="'1' runs only gg->ccbar events, '0' runs all events") + parser.add_argument('--onlyccbar', action='store', type=int, default=0, help="'1' runs only hard->ccbar events, '0' runs all events") + parser.add_argument('--DstarON', action='store', type=int, default=0, help="'1' looks at EEC for D* only") + parser.add_argument('--chinitscat', action='store', type=int, default=0, help="'0' runs all events, \ + '1' runs only hard->ccbar events, '2' runs only gg->ccbar events, '3' runs only D0->Kpi events") + parser.add_argument('--D0withDstarON', action='store', type=int, default=0, help="'1' looks at EEC for D0 and D0 from D*") + parser.add_argument('--difNorm', action='store', type=int, default=0, help="'1' normalizes D* with (D0+D*)") + parser.add_argument('--softpion', action='store', type=int, default=0, help="'1' removes the soft pion from D* distribution, \ + '2' gets only pairs of soft pion w other charged particles,'3' gets only the pair of soft pion with D0, \ + '4' gives soft pion with everything") + parser.add_argument('--giveptRL', action='store', type=int, default=0, help="'1' changes THnSparse to calculate pT*RL (instead of RL)") + parser.add_argument('--runphi', action='store', type=int, default=0, help="'1' looks at the phi meson (not allowed to decay)") + + + args = parser.parse_args() + pinfo("The arguments to run are: ", args) + + # If invalid configFile is given, exit + if not os.path.exists(args.config_file): + print('File \"{0}\" does not exist! Exiting!'.format(args.config_file)) + sys.exit(0) + + # Use PYTHIA seed for event generation + if args.user_seed < 0: + args.user_seed = 1111 + + # Have at least 1 event + if args.nev < 1: + args.nev = 1 + + print("args for charmdecay", args.nocharmdecay) + + process = PythiaQuarkGluon(config_file=args.config_file, output_dir=args.output_dir, args=args) + process.pythia_quark_gluon(args)