diff --git a/.gitignore b/.gitignore index b06920b275f..836aadc9444 100644 --- a/.gitignore +++ b/.gitignore @@ -3,8 +3,10 @@ *.so *.pdf *.zip -fit.exe *~ -*#* -*.d __init__.py +output +*.code-workspace +/.python +/.vscode +.DS_Store \ No newline at end of file diff --git a/Common/BuildFile.xml b/Common/BuildFile.xml index 798808346e1..8416ef8eeba 100644 --- a/Common/BuildFile.xml +++ b/Common/BuildFile.xml @@ -1,4 +1,7 @@ + + + diff --git a/Common/interface/AnalysisTypes.h b/Common/interface/AnalysisTypes.h index c1c577ffe57..194eea4a1b5 100644 --- a/Common/interface/AnalysisTypes.h +++ b/Common/interface/AnalysisTypes.h @@ -54,17 +54,23 @@ ENUM_NAMES(GenQcdMatch) = { { GenQcdMatch::Gluon, "gen_gluon" } }; -enum class TauType { e = 0, mu = 1, tau = 2, jet = 3 }; -ENUM_NAMES(TauType) = { - { TauType::e, "e" }, { TauType::mu, "mu" }, { TauType::tau, "tau" }, { TauType::jet, "jet" } +enum class LegType { e = 1, mu = 2, tau = 4, jet = 8 }; +ENUM_NAMES(LegType) = { + { LegType::e, "e" }, { LegType::mu, "mu" }, { LegType::tau, "tau" }, { LegType::jet, "jet" } }; -inline constexpr TauType GenMatchToTauType(GenLeptonMatch gen_match) +inline constexpr LegType GenMatchToLegType(GenLeptonMatch gen_match) { - if(gen_match == GenLeptonMatch::Electron || gen_match == GenLeptonMatch::TauElectron) return TauType::e; - if(gen_match == GenLeptonMatch::Muon || gen_match == GenLeptonMatch::TauMuon) return TauType::mu; - if(gen_match == GenLeptonMatch::Tau) return TauType::tau; - return TauType::jet; + if(gen_match == GenLeptonMatch::Electron || gen_match == GenLeptonMatch::TauElectron) return LegType::e; + if(gen_match == GenLeptonMatch::Muon || gen_match == GenLeptonMatch::TauMuon) return LegType::mu; + if(gen_match == GenLeptonMatch::Tau) return LegType::tau; + return LegType::jet; } +enum class TauSelection { gen = 1, pt = 2, MVA = 4, DeepTau = 8 }; +ENUM_NAMES(TauSelection) = { + { TauSelection::gen, "gen" }, { TauSelection::pt, "pt" }, { TauSelection::MVA, "MVA"}, + { TauSelection::DeepTau, "DeepTau" } +}; + } // namespace analysis diff --git a/Common/interface/CutTools.h b/Common/interface/CutTools.h new file mode 100644 index 00000000000..53fc38b3767 --- /dev/null +++ b/Common/interface/CutTools.h @@ -0,0 +1,204 @@ +/*! Common tools and definitions to apply cuts. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/
+
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+
+#include "SmartHistogram.h"
+
+namespace cuts {
+
+class cut_failed : public std::exception {
+public:
+    cut_failed(size_t parameter_id) noexcept
+        : _param_id(parameter_id)
+    {
+        std::ostringstream ss;
+        ss << "Cut requirements are not fulfilled for parameter id = " << _param_id << ".";
+        message = ss.str();
+    }
+
+    ~cut_failed() noexcept {}
+
+    virtual const char* what() const noexcept { return message.c_str(); }
+    size_t param_id() const noexcept { return _param_id; }
+
+private:
+    size_t _param_id;
+    std::string message;
+};
+
+template<typename ValueType, typename Histogram>
+ValueType fill_histogram(ValueType value, Histogram& histogram, double weight)
+{
+    histogram.Fill(value, weight);
+    return value;
+}
+
+
+class ObjectSelector {
+public:
+
+    virtual ~ObjectSelector() {}
+
+    void incrementCounter(size_t param_id, const std::string& param_label)
+    {
+        if(counters.size() < param_id)
+            throw std::runtime_error("counters out of range");
+        if(counters.size() == param_id) { // counters and selections filled at least once
+            counters.push_back(0);
+            selections.push_back(0);
+            selectionsSquaredErros.push_back(0);
+            const std::string label = make_unique_label(param_label);
+            labels.push_back(label);
+            label_set.insert(label);
+        }
+        counters.at(param_id)++;
+    }
+
+    void fill_selection(double weight = 1.0)
+    {
+        for(unsigned n = 0; n < counters.size(); ++n) {
+            if(counters.at(n) > 0) {
+                selections.at(n) += weight;
+                selectionsSquaredErros.at(n) += weight * weight;
+            }
+            counters.at(n) = 0;
+        }
+    }
+
+    template<typename ObjectType, typename Selector, typename Comparator>
+    std::vector<ObjectType> collect_objects(double weight, size_t n_objects, const Selector& selector,
+                                            const Comparator& comparator)
+    {
+        std::vector<ObjectType> selected;
+        for(size_t n = 0; n < n_objects; ++n) {
+            try {
+                const ObjectType selectedCandidate = selector(n);
+                selected.push_back(selectedCandidate);
+            } catch(cuts::cut_failed&) {}
+        }
+
+        fill_selection(weight);
+        std::sort(selected.begin(), selected.end(), comparator);
+
+        return selected;
+    }
+
+private:
+    std::string make_unique_label(const std::string& label)
+    {
+        if(!label_set.count(label)) return label;
+        for(size_t n = 2; ; ++n) {
+            std::ostringstream ss;
+            ss << label << "_" << n;
+            if(!label_set.count(ss.str())) return ss.str();
+        }
+    }
+
+protected:
+    std::vector<size_t> counters;
+    std::vector<double> selections;
+    std::vector<double> selectionsSquaredErros;
+    std::vector<std::string> labels;
+    std::set<std::string> label_set;
+};
+
+namespace detail {
+struct DefaultSelectionManager {
+    template<typename ValueType>
+    void FillHistogram(ValueType value, const std::string& histogram_name) {}
+};
+} // namespace detail
+
+template<typename ObjectSelector, typename SelectionManager = detail::DefaultSelectionManager>
+class Cutter {
+public:
+    explicit Cutter(ObjectSelector* _objectSelector, SelectionManager* _selectionManager = nullptr)
+        : objectSelector(_objectSelector), selectionManager(_selectionManager), param_id(0) {}
+
+    bool Enabled() const { return objectSelector != nullptr; }
+    int CurrentParamId() const { return param_id; }
+
+    void operator()(bool expected, const std::string& label)
+    {
+        (*this)(expected, label, expected);
+    }
+
+    template<typename ValueType>
+    void operator()(bool expected, const std::string& label, const ValueType& value)
+    {
+        if(selectionManager) {
+            try {
+                selectionManager->FillHistogram(value, label);
+            } catch(std::exception& e) {
+                std::cout << "ERROR: " << e.what() << std::endl;
+            }
+        }
+        if(Enabled()) {
+            ++param_id;
+            if(!expected)
+                throw cut_failed(param_id - 1);
+            objectSelector->incrementCounter(param_id - 1, label);
+        }
+    }
+
+    bool test(bool expected, const std::string& label)
+    {
+        try {
+            (*this)(expected, label);
+            return true;
+        } catch(cut_failed&) {}
+        return false;
+    }
+
+private:
+    ObjectSelector* objectSelector;
+    SelectionManager* selectionManager;
+    size_t param_id;
+};
+
+} // cuts
+
+namespace root_ext {
+
+template<>
+class SmartHistogram<cuts::ObjectSelector> : public cuts::ObjectSelector, public AbstractHistogram {
+public:
+    using RootContainer = TH1D;
+
+    SmartHistogram(const std::string& name) : AbstractHistogram(name) {}
+
+    void SetSave(bool _save)
+    {
+        save = _save;
+    }
+
+    virtual void WriteRootObject()
+    {
+        if(!save || !selections.size() || !GetOutputDirectory())
+            return;
+        std::unique_ptr<TH1D> selection_histogram(
+            new TH1D(Name().c_str(), Name().c_str(), selections.size(), -0.5, -0.5 + selections.size()));
+        for(unsigned n = 0; n < selections.size(); ++n) {
+            const std::string label = labels.at(n);
+            selection_histogram->GetXaxis()->SetBinLabel(n + 1, label.c_str());
+            selection_histogram->SetBinContent(n + 1, selections.at(n));
+            selection_histogram->SetBinError(n + 1, std::sqrt(selectionsSquaredErros.at(n)));
+        }
+        root_ext::WriteObject(*selection_histogram, GetOutputDirectory());
+    }
+
+private:
+    bool save{true};
+};
+}
diff --git a/Common/interface/GenTruthTools.h b/Common/interface/GenTruthTools.h
index 8b77672ed64..7974fad1be6 100644
--- a/Common/interface/GenTruthTools.h
+++ b/Common/interface/GenTruthTools.h
@@ -13,24 +13,42 @@ namespace analysis {
 namespace gen_truth {
 
+struct FinalState {
+public:
+    enum class ParticleType { visible, light_lepton, neutrino, gamma, charged_hadron, neutral_hadron };
+
+    explicit FinalState(const reco::GenParticle& particle, const std::set<int>& pdg_to_exclude = {},
+                        const std::set<const reco::GenParticle*>& particles_to_exclude = {});
+
+    const std::set<const reco::GenParticle*>& getParticles(ParticleType type) { return particles[type]; }
+    const LorentzVectorXYZ& getMomentum(ParticleType type) { return momentum[type]; }
+    size_t count(ParticleType type) { return getParticles(type).size(); }
+
+private:
+    void findFinalStateParticles(const reco::GenParticle& particle, const std::set<int>& pdg_to_exclude,
+                                 const std::set<const reco::GenParticle*>& particles_to_exclude);
+    void addParticle(const reco::GenParticle& particle);
+
+private:
+    std::map<ParticleType, std::set<const reco::GenParticle*>> particles;
+    std::map<ParticleType, LorentzVectorXYZ> momentum;
+};
+
 struct LeptonMatchResult {
     GenLeptonMatch match{GenLeptonMatch::NoMatch};
-    const reco::GenParticle* gen_particle{nullptr};
-    std::vector<const reco::GenParticle*> visible_daughters;
-    LorentzVectorXYZ visible_daughters_p4;
-    int n_chargedParticles;
-    int n_neutralParticles;
+    const reco::GenParticle *gen_particle_firstCopy{nullptr}, *gen_particle_lastCopy{nullptr};
+    std::set<const reco::GenParticle*> visible_daughters, visible_rad;
+    LorentzVectorXYZ visible_p4, visible_rad_p4;
+    unsigned n_charged_hadrons{0}, n_neutral_hadrons{0}, n_gammas{0}, n_gammas_rad{0};
 };
 
-void FindFinalStateDaughters(const reco::GenParticle& particle, std::set<const reco::GenParticle*>& daughters,
-                             const std::set<int>& pdg_to_exclude);
-
-LorentzVectorXYZ GetFinalStateMomentum(const reco::GenParticle& particle, std::vector<const reco::GenParticle*>& visible_daughters,
-                                       bool excludeInvisible, bool excludeLightLeptons);
-
-LeptonMatchResult LeptonGenMatch(const LorentzVectorM& p4,
-                                 const reco::GenParticleCollection& genParticles);
+const reco::GenParticle* FindTerminalCopy(const reco::GenParticle& genParticle, bool first);
+bool FindLeptonGenMatch(const reco::GenParticle& particle, LeptonMatchResult& result,
+                        const LorentzVectorM* ref_p4 = nullptr, double* best_match_dr2 = nullptr);
+std::vector<LeptonMatchResult> CollectGenLeptons(const reco::GenParticleCollection& genParticles);
+LeptonMatchResult LeptonGenMatch(const LorentzVectorM& p4, const reco::GenParticleCollection& genParticles);
+LeptonMatchResult LeptonGenMatch(const LorentzVectorM& p4, const std::vector& genLeptons); float GetNumberOfPileUpInteractions(edm::Handle>& pu_infos); diff --git a/Common/interface/NumericPrimitives.h b/Common/interface/NumericPrimitives.h new file mode 100644 index 00000000000..5c22bc867bf --- /dev/null +++ b/Common/interface/NumericPrimitives.h @@ -0,0 +1,844 @@ +/*! Definition of the primitives that extend CERN ROOT functionality. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#pragma once + +#include +#include +#include +#include "TextIO.h" + +namespace analysis { + +enum class RangeBoundaries { Open, MinIncluded, MaxIncluded, BothIncluded }; + +namespace detail { +template::value> +struct RangeSize { + static T size(const T& min, const T& max, RangeBoundaries) { return max - min; } +}; + +template +struct RangeSize { + static T size(T min, T max, RangeBoundaries boundaries) + { + T delta = max - min; + if(boundaries == RangeBoundaries::BothIncluded) + delta += T(1); + else if(boundaries == RangeBoundaries::Open && delta != T(0)) + delta -= T(1); + return delta; + } +}; + +template::value> +struct Abs { + static T abs(T value) { return std::abs(value); } +}; + +template +struct Abs { + static T abs(T value) { return value; } +}; + +template +inline T FloatRound(T x, T /*ref*/) +{ + return x; +} + +template<> +inline float FloatRound(float x, float ref) +{ + static constexpr float precision = 1e-6f; + if(ref == 0) return x; + return std::round(x / ref / precision) * precision * ref; +} + +template<> +inline double FloatRound(double x, double ref) +{ + static constexpr double precision = 1e-8; + if(ref == 0) return x; + return std::round(x / ref / precision) * precision * ref; +} + + +template +inline size_t GetNumberOfGridPoints(T min, T max, T step) +{ + return static_cast((max - min) / step); +} + +template<> +inline size_t GetNumberOfGridPoints(float min, float max, float step) +{ + static constexpr float precision = 1e-6f; + return static_cast(std::round((max - min) / step / precision) * precision); +} + +template<> +inline size_t GetNumberOfGridPoints(double min, double max, double step) +{ + static constexpr double precision = 1e-8; + return static_cast(std::round((max - min) / step / precision) * precision); +} + +} // namespace detail + +template +struct Range { + using ValueType = T; + using ConstRefType = + typename std::conditional::value, ValueType, const ValueType&>::type; + + static const std::pair GetBoundariesSymbols(RangeBoundaries b) + { + static const std::map> symbols = { + { RangeBoundaries::Open, { '(', ')' } }, + { RangeBoundaries::MinIncluded, { '[', ')' } }, + { RangeBoundaries::MaxIncluded, { '(', ']' } }, + { RangeBoundaries::BothIncluded, { '[', ']' } }, + }; + return symbols.at(b); + } + + static RangeBoundaries CreateBoundaries(bool include_min, bool include_max) + { + if(include_min && !include_max) + return RangeBoundaries::MinIncluded; + if(!include_min && include_max) + return RangeBoundaries::MaxIncluded; + if(include_min && include_max) + return RangeBoundaries::BothIncluded; + return RangeBoundaries::Open; + } + + Range() : _min(0), _max(0) {} + Range(ConstRefType min, ConstRefType max, RangeBoundaries boundaries = RangeBoundaries::BothIncluded) : + _min(min), _max(max), _boundaries(boundaries) + { + if(!IsValid(min, max)) + throw exception("Invalid range [%1%, %2%].") % min % max; + } + Range(const Range& other) : _min(other._min), _max(other._max), _boundaries(other._boundaries) {} + Range(const Range& other, 
RangeBoundaries boundaries) : + _min(other._min), _max(other._max), _boundaries(boundaries) {} + virtual ~Range() {} + Range& operator=(const Range& other) + { + _min = other._min; + _max = other._max; + _boundaries = other._boundaries; + return *this; + } + + ConstRefType min() const { return _min; } + ConstRefType max() const { return _max; } + T size() const { return detail::RangeSize::size(min(), max(), boundaries()); } + + RangeBoundaries boundaries() const { return _boundaries; } + bool min_included() const + { + return boundaries() == RangeBoundaries::MinIncluded || boundaries() == RangeBoundaries::BothIncluded; + } + bool max_included() const + { + return boundaries() == RangeBoundaries::MaxIncluded || boundaries() == RangeBoundaries::BothIncluded; + } + + bool Contains(ConstRefType v) const + { + if(min() == max()) + return (min_included() || max_included()) && v == min(); + const bool min_cond = (min_included() && v >= min()) || v > min(); + const bool max_cond = (max_included() && v <= max()) || v < max(); + return min_cond && max_cond; + } + static bool IsValid(ConstRefType min, ConstRefType max) { return min <= max; } + + Range Extend(ConstRefType v, bool include = true) const + { + if(Contains(v)) + return *this; + const auto new_min = std::min(min(), v); + const auto new_max = std::max(max(), v); + RangeBoundaries b; + if(new_min == v) { + if(include) + b = max_included() ? RangeBoundaries::BothIncluded : RangeBoundaries::MinIncluded; + else + b = max_included() ? RangeBoundaries::MaxIncluded : RangeBoundaries::Open; + } else { + if(include) + b = min_included() ? RangeBoundaries::BothIncluded : RangeBoundaries::MaxIncluded; + else + b = min_included() ? RangeBoundaries::MinIncluded : RangeBoundaries::Open; + } + return Range(new_min, new_max, b); + } + + bool operator ==(const Range& other) const + { + return min() == other.min() && max() == other.max() && boundaries() == other.boundaries(); + } + bool operator !=(const Range& other) const { return !(*this == other); } + + bool Includes(const Range& other) const + { + const bool min_cond = min() == other.min() ? min_included() || !other.min_included() : min() < other.min(); + const bool max_cond = max() == other.max() ? max_included() || !other.max_included() : max() > other.max(); + return min_cond && max_cond; + } + bool Overlaps(const Range& other) const + { + if(min() == max()) + return (min_included() || max_included()) && other.Contains(min()); + const bool min_cond = min() == other.max() ? min_included() && other.max_included() : min() < other.max(); + const bool max_cond = max() == other.min() ? 
max_included() && other.min_included() : max() > other.min(); + return min_cond && max_cond; + } + Range Combine(const Range& other) const + { + if(!Overlaps(other)) + throw exception("Unable to combine non overlapping ranges."); + const auto new_min = std::min(min(), other.min()); + const auto new_max = std::max(max(), other.max()); + const bool include_min = (new_min == min() && min_included()) + || (new_min == other.min() && other.min_included()); + const bool include_max = (new_max == max() && max_included()) + || (new_max == other.max() && other.max_included()); + const RangeBoundaries b = CreateBoundaries(include_min, include_max); + return Range(new_min, new_max, b); + } + + std::string ToString(char sep = ':') const + { + std::ostringstream ss; + const auto b_sym = GetBoundariesSymbols(boundaries()); + if(boundaries() != RangeBoundaries::BothIncluded) + ss << b_sym.first; + ss << min() << sep << max(); + if(boundaries() != RangeBoundaries::BothIncluded) + ss << b_sym.second; + return ss.str(); + } + + static Range Parse(const std::string& str, const std::string& separators=": \t") + { + const auto values = SplitValueList(str, true, separators, true); + if(values.size() != 2) + throw exception("Invalid range '%1%'.") % str; + return Make(values); + } + + static Range Read(std::istream& stream, const std::string& separators=": \t") + { + const auto values = ReadValueList(stream, 2, true, separators, true); + return Make(values); + } + +private: + static Range Make(const std::vector& values) + { + static const auto opened_b_symbols = GetBoundariesSymbols(RangeBoundaries::Open); + static const auto closed_b_symbols = GetBoundariesSymbols(RangeBoundaries::BothIncluded); + bool include_min = true, include_max = true; + std::string min_str = values.at(0), max_str = values.at(1); + if(min_str.size() && (min_str.front() == opened_b_symbols.first || min_str.front() == closed_b_symbols.first)) { + include_min = min_str.front() == closed_b_symbols.first; + min_str.erase(0, 1); + } + if(max_str.size() && (max_str.back() == opened_b_symbols.second || max_str.back() == closed_b_symbols.second)) { + include_max = max_str.back() == closed_b_symbols.second; + max_str.erase(max_str.size() - 1, 1); + } + const T min = ::analysis::Parse(min_str); + const T max = ::analysis::Parse(max_str); + const RangeBoundaries b = CreateBoundaries(include_min, include_max); + return Range(min, max, b); + } + +private: + T _min, _max; + RangeBoundaries _boundaries; +}; + +template +std::ostream& operator<<(std::ostream& s, const Range& r) +{ + s << r.ToString(':'); + return s; +} + +template +std::istream& operator>>(std::istream& s, Range& r) +{ + r = Range::Read(s); + return s; +} + +template +struct RelativeRange { + using ValueType = T; + using ConstRefType = typename Range::ConstRefType; + RelativeRange() : _down(0), _up(0) {} + RelativeRange(ConstRefType down, ConstRefType up) : _down(down), _up(up) + { + if(!IsValid(down, up)) + throw exception("Invalid relative range [%1%, %2%].") % down % up; + } + + ConstRefType down() const { return _down; } + ConstRefType up() const { return _up; } + Range ToAbsoluteRange(ConstRefType v) const { return Range(v + down(), v + up()); } + static bool IsValid(ConstRefType down, ConstRefType up) { return down <= 0 && up >= 0; } + + std::string ToString(char sep = ' ') const + { + std::ostringstream ss; + ss << down() << sep << up(); + return ss.str(); + } + + static RelativeRange Parse(const std::string& str, const std::string& separators=": \t") + { + const auto values = 
SplitValueList(str, true, separators, true); + if(values.size() != 2) + throw exception("Invalid relative range '%1%'.") % str; + return Make(values); + } + + static RelativeRange Read(std::istream& stream, const std::string& separators=": \t") + { + const auto values = ReadValueList(stream, 2, true, separators, true); + return Make(values); + } + +private: + static RelativeRange Make(const std::vector& values) + { + const T down = ::analysis::Parse(values.at(0)); + const T up = ::analysis::Parse(values.at(1)); + return RelativeRange(down, up); + } + +private: + T _down, _up; +}; + +template +std::ostream& operator<<(std::ostream& s, const RelativeRange& r) +{ + s << r.ToString(':'); + return s; +} + +template +std::istream& operator>>(std::istream& s, RelativeRange& r) +{ + r = RelativeRange::Read(s); + return s; +} + +template +struct RangeWithStep : public Range { + using ValueType = typename Range::ValueType; + using ConstRefType = typename Range::ConstRefType; + + + enum class PrintMode { Step = 0, NGridPoints = 1, NBins = 2 }; + struct iterator { + iterator(const RangeWithStep& _range, size_t _pos) : range(&_range), pos(_pos) {} + iterator& operator++() { ++pos; return *this; } + iterator operator++(int) { iterator iter(*this); operator++(); return iter; } + bool operator==(const iterator& other) { return range == other.range && pos == other.pos;} + bool operator!=(const iterator& other) { return !(*this == other); } + T operator*() { return range->grid_point_value(pos); } + private: + const RangeWithStep *range; + size_t pos; + }; + + RangeWithStep() : _step(0) {} + RangeWithStep(ConstRefType min, ConstRefType max, ConstRefType step) : + Range(min, max, RangeBoundaries::BothIncluded), _step(step) + { + } + + ConstRefType step() const { return _step; } + T grid_point_value(size_t index) const + { + const T ref = std::max(detail::Abs::abs(this->min()), detail::Abs::abs(this->max())); + return detail::FloatRound(this->min() + T(index) * step(), ref); + } + size_t n_grid_points() const + { + if(this->max() == this->min()) return 1; + if(step() == 0) + throw exception("Number of grid points is not defined for a non-point range with the step = 0."); + size_t n_points = detail::GetNumberOfGridPoints(this->min(), this->max(), step()); + if(this->Contains(grid_point_value(n_points))) + ++n_points; + return n_points; + } + size_t n_bins() const { return n_grid_points() - 1; } + + size_t find_bin(T value) const + { + if(!this->Contains(value)) + throw exception("find_bin: value is out of range."); + if(n_bins() == 0) + throw exception("find_bin: number of bins is 0."); + size_t bin_id = static_cast((value - this->min()) / step()); + if(bin_id == n_bins()) + --bin_id; + return bin_id; + } + + iterator begin() const { return iterator(*this, 0); } + iterator end() const { return iterator(*this, n_grid_points()); } + + std::string ToString(PrintMode mode = PrintMode::Step) const + { + std::ostringstream ss; + ss << this->min() << Separators().at(0) << this->max() << Separators().at(static_cast(mode)); + if(mode == PrintMode::Step) + ss << step(); + else if(mode == PrintMode::NGridPoints) + ss << n_grid_points(); + else if(mode == PrintMode::NBins) + ss << n_bins(); + else + throw exception("Unsupported RangeWithStep::PrintMode = %1%.") % static_cast(mode); + return ss.str(); + } + + static RangeWithStep Parse(const std::string& str) + { + const size_t first_split_pos = str.find_first_of(Separators()); + if(first_split_pos != std::string::npos) { + const size_t last_split_pos = 
str.find_first_of(Separators(), first_split_pos + 1); + if(last_split_pos != std::string::npos) { + const size_t end_split_pos = str.find_last_of(Separators()); + if(last_split_pos == end_split_pos) { + const size_t sep_pos = Separators().find(str.at(last_split_pos)); + const PrintMode mode = static_cast(sep_pos); + std::vector values; + values.push_back(str.substr(0, first_split_pos)); + values.push_back(str.substr(first_split_pos + 1, last_split_pos - first_split_pos - 1)); + values.push_back(str.substr(last_split_pos + 1)); + return Make(values, mode); + } + } + } + throw exception("Invalid range with step '%1%'.") % str; + } + +private: + static RangeWithStep Make(const std::vector& values, PrintMode mode) + { + const T min = ::analysis::Parse(values.at(0)); + const T max = ::analysis::Parse(values.at(1)); + T step(0); + if(mode == PrintMode::Step) { + step = ::analysis::Parse(values.at(2)); + } else if(mode == PrintMode::NGridPoints) { + size_t n = ::analysis::Parse(values.at(2)); + if(n == 0 || (n == 1 && max != min) || (n != 1 && max == min)) + throw exception("Invalid number of grid points."); + if(max != min) + step = (max - min) / T(n - 1); + } else if(mode == PrintMode::NBins) { + size_t n = ::analysis::Parse(values.at(2)); + if((n == 0 && max != min) || (n != 0 && max == min)) + throw exception("Invalid number of bins."); + if(max != min) + step = (max - min) / T(n); + } else { + throw exception("Unsupported RangeWithStep::PrintMode = %1%.") % static_cast(mode); + } + return RangeWithStep(min, max, step); + } + + static const std::string& Separators() { static const std::string sep = ":|/"; return sep; } + +private: + T _step; +}; + +template +std::ostream& operator<<(std::ostream& s, const RangeWithStep& r) +{ + s << r.ToString(); + return s; +} + +template +std::istream& operator>>(std::istream& s, RangeWithStep& r) +{ + std::string str; + s >> str; + r = RangeWithStep::Parse(str); + return s; +} + +template +struct Angle { + enum class Interval { Symmetric, Positive }; + static constexpr double Pi() { return boost::math::constants::pi(); } + static constexpr double NumberOfPiPerPeriod() { return double(n_pi_per_period_num) / n_pi_per_period_denom; } + static constexpr double FullPeriod() { return n_pi_per_period_num * Pi() / n_pi_per_period_denom; } + static constexpr double HalfPeriod() { return FullPeriod() / 2; } + static constexpr double RadiansToDegreesFactor() { return 180. / Pi(); } + + Angle() : _value(0), _interval(Interval::Symmetric) {} + Angle(double value, Interval interval = Interval::Symmetric) + : _value(AdjustValue(value, interval)), _interval(interval) {} + + double value() const { return _value; } + double value_degrees() const { return value() * RadiansToDegreesFactor(); } + Interval interval() const { return _interval; } + + Angle operator+(const Angle& other) const { return Angle(value() + other.value(), interval()); } + Angle operator-(const Angle& other) const { return Angle(value() - other.value(), interval()); } + + static const Range& AngleValuesRange(Interval interval) + { + static const std::map> range_map = { + { Interval::Symmetric, { -HalfPeriod(), HalfPeriod() } }, + { Interval::Positive, { 0, FullPeriod() } } + }; + return range_map.at(interval); + } + + static double AdjustValue(double value, Interval interval) + { + const Range& range = AngleValuesRange(interval); + value -= FullPeriod() * std::floor(value/FullPeriod()); + while(value < range.min() || value >= range.max()) + value += value < range.min() ? 
FullPeriod() : -FullPeriod(); + return value; + } + +private: + double _value; + Interval _interval; +}; + +template +std::ostream& operator<<(std::ostream& s, const Angle& a) +{ + s << a.value(); + return s; +} + +template +std::istream& operator>>(std::istream& s, Angle& a) +{ + double value; + s >> value; + a = Angle(value); + return s; +} + +template +struct Range> { + using A = Angle; + using ValueType = A; + + Range() : _min(0), _max(0) {} + Range(const A& min, const A& max) : _min(min), _max(max.value(), min.interval()) {} + virtual ~Range() {} + + const A& min() const { return _min; } + const A& max() const { return _max; } + A size() const { return A(_max.value() - _min.value(), A::Interval::Positive); } + Range ToValueRange() const + { + const double min_value = min().value(); + double max_value = max().value(); + if(max_value < min_value) + max_value += A::FullPeriod(); + return Range(min_value, max_value); + } + + bool Contains(const A& a) const + { + const Range min_a_value_range = Range(min(), a).ToValueRange(); + return ToValueRange().Contains(min_a_value_range.max()); + } + + static bool IsValid(const A& /*min*/, const A& /*max*/) { return true; } + + Range Extend(const A& a) const + { + if(Contains(a)) + return *this; + const A a_fixed(a.value(), min().interval()); + const Range extend_min(a_fixed, max()), extend_max(min(), a_fixed); + return extend_max.size().value() < extend_min.size().value() ? extend_max : extend_min; + } + + bool Includes(const Range& other) const + { + return Contains(other.min()) && Contains(other.max()); + } + + bool Overlaps(const Range& other) const + { + return Contains(other.min()) || Contains(other.max()) || other.Contains(min()); + } + + Range Combine(const Range& other) const + { + if(!Overlaps(other)) + throw exception("Unable to combine non overlapping ranges."); + if(Includes(other)) + return *this; + if(other.Includes(*this)) + return other; + if(Contains(other.min())) + return Range(min(), other.max()); + return Range(other.min(), max()); + } + std::string ToString(char sep = ' ') const + { + std::ostringstream ss; + ss << min() << sep << max(); + return ss.str(); + } + + static Range Parse(const std::string& str, const std::string& separators=": \t") + { + const auto values = SplitValueList(str, true, separators, true); + if(values.size() != 2) + throw exception("Invalid angle range '%1%'.") % str; + return Make(values); + } + + static Range Read(std::istream& stream, const std::string& separators=": \t") + { + const auto values = ReadValueList(stream, 2, true, separators, true); + return Make(values); + } + +private: + static Range Make(const std::vector& values) + { + const A min = ::analysis::Parse(values.at(0)); + const A max = ::analysis::Parse(values.at(1)); + return Range(min, max); + } + +private: + A _min, _max; +}; + +template +struct RangeMultiD { +public: + using ValueType = typename Range::ValueType; + explicit RangeMultiD(size_t n_dim) : ranges(n_dim) {} + explicit RangeMultiD(const std::vector& _ranges) : ranges(_ranges) {} + + size_t GetNumberOfDimensions() const { return ranges.size(); } + const Range& GetRange(size_t dim_id) const { Check(dim_id); return ranges.at(dim_id - 1); } + Range& GetRange(size_t dim_id) { Check(dim_id); return ranges.at(dim_id - 1); } + + bool Contains(const std::vector& point) const + { + if(point.size() != GetNumberOfDimensions()) + throw exception("Invalid number of dimensions."); + for(size_t n = 0; n < ranges.size(); ++n) + if(!ranges.at(n).Contains(point.at(n))) return false; + return 
true; + } + +private: + void Check(size_t dim_id) const + { + if(!dim_id || dim_id > GetNumberOfDimensions()) + throw exception("Wrong dimension id = %1%") % dim_id; + } + +private: + std::vector ranges; +}; + +template +struct MultiRange { + using ValueType = typename Range::ValueType; + using ConstRefType = typename Range::ConstRefType; + using RangeVec = std::vector; + + static const std::string& Separator() { static const std::string sep = ", "; return sep; } + + MultiRange() {} + explicit MultiRange(const RangeVec& _ranges) : ranges(_ranges) {} + + bool Contains(const ValueType& point) const + { + for(const auto& range : ranges) { + if(range.Contains(point)) + return true; + } + return false; + } + + bool Overlaps(const Range& other) const + { + for(const auto& range : ranges) { + if(range.Overlaps(other)) + return true; + } + return false; + } + + std::string ToString() const + { + std::ostringstream ss; + for(const auto& range : ranges) + ss << range << Separator(); + std::string str = ss.str(); + if(str.size()) + str.erase(str.size() - Separator().size()); + return str; + } + + static MultiRange Parse(const std::string& str) + { + const auto range_strs = SplitValueList(str, true, Separator(), true); + RangeVec ranges; + for(const auto& range_str : range_strs) + ranges.push_back(::analysis::Parse(range_str)); + return MultiRange(ranges); + } + +private: + RangeVec ranges; +}; + +template +std::ostream& operator<<(std::ostream& s, const MultiRange& r) +{ + s << r.ToString(); + return s; +} + +template +std::istream& operator>>(std::istream& s, MultiRange& r) +{ + std::string str; + std::getline(s, str); + r = MultiRange::Parse(str); + return s; +} + + +struct NumericalExpression { + NumericalExpression() : _value(0) {} + NumericalExpression(const std::string& expression) + : _expression(expression) + { + const std::string formula = boost::str(boost::format("x*(%1%)") % expression); + TF1 fn("", formula.c_str(), 0, 1); +// if(!fn.IsValid()) +// throw exception("Invalid numerical expression '%1%'") % expression; + _value = fn.Eval(1); + } + + const std::string& expression() const { return _expression; } + double value() const { return _value; } + operator double() const { return _value; } + +private: + std::string _expression; + double _value; +}; + +inline std::ostream& operator<<(std::ostream& s, const NumericalExpression& e) +{ + s << e.expression(); + return s; +} + +inline std::istream& operator>>(std::istream& s, NumericalExpression& e) +{ + std::string line; + std::getline(s, line); + e = NumericalExpression(line); + return s; +} + +struct Grid_ND { + using Position = std::vector; + + struct iterator { + iterator(const Position& _pos, const Position& _limits) : pos(_pos), limits(&_limits) {} + + bool operator==(const iterator& other) const { + if(pos.size() != other.pos.size()) return false; + for(size_t n = 0; n < pos.size(); ++n) { + if(pos.at(n) != other.pos.at(n)) return false; + } + return true; + } + + bool operator!=(const iterator& other) { return !(*this == other); } + + iterator& operator++() + { + ++pos.at(0); + for(size_t n = 0; n < pos.size() - 1 && pos.at(n) >= limits->at(n); ++n) { + ++pos.at(n+1); + pos.at(n) = 0; + } + return *this; + } + + iterator operator++(int) + { + iterator cp(*this); + ++(*this); + return cp; + } + + const Position& operator*() const { return pos; } + const Position* operator->() const { return &pos; } + + private: + Position pos; + const Position* limits; + }; + + explicit Grid_ND(const Position& _limits) : limits(_limits) + { + 
if(!limits.size())
+            throw exception("Grid dimensions should be > 0");
+        for(size_t limit : limits) {
+            if(!limit)
+                throw exception("Grid range limit should be > 0.");
+        }
+    }
+
+    iterator begin() const
+    {
+        Position pos;
+        pos.assign(limits.size(), 0);
+        return iterator(pos, limits);
+    }
+
+    iterator end() const
+    {
+        Position pos;
+        pos.assign(limits.size(), 0);
+        pos.back() = limits.back();
+        return iterator(pos, limits);
+    }
+
+private:
+    Position limits;
+};
+
+} // namespace analysis
diff --git a/Common/interface/PatHelpers.h b/Common/interface/PatHelpers.h
new file mode 100644
index 00000000000..5f104ec3859
--- /dev/null
+++ b/Common/interface/PatHelpers.h
@@ -0,0 +1,39 @@
+/*! Various utility functions.
+This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */
+
+#pragma once
+
+#include
+#include "DataFormats/PatCandidates/interface/Jet.h"
+#include "DataFormats/PatCandidates/interface/Muon.h"
+#include "DataFormats/PatCandidates/interface/Tau.h"
+#include "FWCore/ParameterSet/interface/ParameterSet.h"
+#include "TauTriggerTools/Common/interface/GenTruthTools.h"
+
+namespace tau_trigger {
+
+using namespace analysis;
+
+double MuonIsolation(const pat::Muon& muon);
+
+template<typename LVector1, typename LVector2>
+double Calculate_MT(const LVector1& lepton_p4, const LVector2& met_p4)
+{
+    const double delta_phi = ROOT::Math::VectorUtil::DeltaPhi(lepton_p4, met_p4);
+    return std::sqrt( 2.0 * lepton_p4.Pt() * met_p4.Pt() * ( 1.0 - std::cos(delta_phi) ) );
+}
+
+struct TauEntry {
+    const pat::Tau* reco_tau{nullptr};
+    gen_truth::LeptonMatchResult gen_tau;
+    unsigned selection{0};
+};
+
+std::vector<TauEntry> CollectTaus(const LorentzVectorM& muon_p4, const pat::TauCollection& taus,
+                                  const std::vector<gen_truth::LeptonMatchResult>& genLeptons, double deltaR2Thr);
+
+bool PassBtagVeto(const LorentzVectorM& muon_p4, const LorentzVectorM& tau_p4, const pat::JetCollection& jets,
+                  double btagThreshold, double deltaR2Thr);
+gen_truth::LeptonMatchResult SelectGenLeg(const std::vector<gen_truth::LeptonMatchResult>& genLeptons, bool is_tau);
+
+} // namespace tau_trigger
diff --git a/Common/interface/RootExt.h b/Common/interface/RootExt.h
new file mode 100644
index 00000000000..c02e97f7668
--- /dev/null
+++ b/Common/interface/RootExt.h
@@ -0,0 +1,110 @@
+/*! Common CERN ROOT extensions.
+This file is part of https://github.com/cms-tau-pog/TauTriggerTools.
*/ + +#pragma once + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "exception.h" + +namespace root_ext { + +std::shared_ptr CreateRootFile(const std::string& file_name, + ROOT::ECompressionAlgorithm compression = ROOT::kZLIB, + int compression_level = 9); +std::shared_ptr OpenRootFile(const std::string& file_name); + +void WriteObject(const TObject& object, TDirectory* dir, const std::string& name = ""); + +template +void WriteObject(const Object& object) +{ + TDirectory* dir = object.GetDirectory(); + WriteObject(object, dir); +} + + +template +Object* ReadObject(TDirectory& file, const std::string& name) +{ + if(!name.size()) + throw analysis::exception("Can't read nameless object."); + TObject* root_object = file.Get(name.c_str()); + if(!root_object) + throw analysis::exception("Object '%1%' not found in '%2%'.") % name % file.GetName(); + Object* object = dynamic_cast(root_object); + if(!object) + throw analysis::exception("Wrong object type '%1%' for object '%2%' in '%3%'.") % typeid(Object).name() + % name % file.GetName(); + return object; +} + +template +Object* TryReadObject(TDirectory& file, const std::string& name) +{ + try { + return ReadObject(file, name); + } catch(analysis::exception&) {} + return nullptr; +} + +template +Object* CloneObject(const Object& original_object, const std::string& new_name = "") +{ + const std::string new_object_name = new_name.size() ? new_name : original_object.GetName(); + Object* new_object = dynamic_cast(original_object.Clone(new_object_name.c_str())); + if(!new_object) + throw analysis::exception("Type error while cloning object '%1%'.") % original_object.GetName(); + return new_object; +} + +template +Object* CloneObject(const Object& original_object, const std::string& new_name, bool detach_from_file) +{ + Object* new_object = CloneObject(original_object, new_name); + if(detach_from_file) + new_object->SetDirectory(nullptr); + return new_object; +} + +template +Object* ReadCloneObject(TDirectory& file, const std::string& original_name, const std::string& new_name = "", + bool detach_from_file = false) +{ + Object* original_object = ReadObject(file, original_name); + return CloneObject(*original_object, new_name, detach_from_file); +} + +TDirectory* GetDirectory(TDirectory& root_dir, const std::string& name, bool create_if_needed = true); + +enum class ClassInheritance { TH1, TTree, TDirectory }; + +ClassInheritance FindClassInheritance(const std::string& class_name); + +struct WarningSuppressor { + const Int_t old_ignore_level; + WarningSuppressor(Int_t ignore_level) + : old_ignore_level(gErrorIgnoreLevel) + { + gErrorIgnoreLevel = ignore_level; + } + ~WarningSuppressor() + { + gErrorIgnoreLevel = old_ignore_level; + } +}; + +} // namespace root_ext + + +std::ostream& operator<<(std::ostream& s, const TVector3& v); +std::ostream& operator<<(std::ostream& s, const TLorentzVector& v); +std::ostream& operator<<(std::ostream& s, const TMatrixD& matrix); diff --git a/Common/interface/SmartHistogram.h b/Common/interface/SmartHistogram.h new file mode 100644 index 00000000000..e6067f6f78a --- /dev/null +++ b/Common/interface/SmartHistogram.h @@ -0,0 +1,452 @@ +/*! Definition of class SmartHistogram that allows to create ROOT-compatible histograms. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include "RootExt.h" +#include "TextIO.h" +#include "NumericPrimitives.h" + +namespace root_ext { + +class AbstractHistogram { +public: + AbstractHistogram(const std::string& _name) + : name(_name), outputDirectory(nullptr) {} + AbstractHistogram(const AbstractHistogram& other) : name(other.name), outputDirectory(other.outputDirectory) {} + virtual ~AbstractHistogram() {} + + virtual void WriteRootObject() = 0; + virtual void SetOutputDirectory(TDirectory* directory) { outputDirectory = directory; } + + TDirectory* GetOutputDirectory() const { return outputDirectory; } + const std::string& Name() const { return name; } + virtual void SetName(const std::string& _name) { name = _name; } + +private: + std::string name; + TDirectory* outputDirectory; +}; + +namespace detail { + +template +class Base1DHistogram : public AbstractHistogram { +public: + using const_iterator = typename std::deque::const_iterator; + using RootContainer = TTree; + + Base1DHistogram(const std::string& name) : AbstractHistogram(name) {} + + const std::deque& Data() const { return data; } + size_t size() const { return data.size(); } + const_iterator begin() const { return data.begin(); } + const_iterator end() const { return data.end(); } + + void Fill(const ValueType& value) + { + data.push_back(value); + } + + virtual void WriteRootObject() + { + if(!GetOutputDirectory()) return; + std::unique_ptr rootTree(new TTree(Name().c_str(), Name().c_str())); + rootTree->SetDirectory(GetOutputDirectory()); + ValueType branch_value; + rootTree->Branch("values", &branch_value); + for(const ValueType& value : data) { + branch_value = value; + rootTree->Fill(); + } + root_ext::WriteObject(*rootTree); + } + + void CopyContent(TTree& rootTree) + { + data.clear(); + ValueType branch_value; + TBranch* branch; + rootTree.SetBranchAddress("values", &branch_value, &branch); + Long64_t N = rootTree.GetEntries(); + for(Long64_t n = 0; n < N; ++n) { + rootTree.GetEntry(n); + data.push_back(branch_value); + } + rootTree.ResetBranchAddress(branch); + } + +private: + std::deque data; +}; + +template +class Base2DHistogram : public AbstractHistogram { +public: + struct Value { + NumberType x, y; + Value() {} + Value(NumberType _x, NumberType _y) : x(_x), y(_y) {} + }; + + using const_iterator = typename std::deque::const_iterator; + using RootContainer = TTree; + + Base2DHistogram(const std::string& name) : AbstractHistogram(name) {} + + const std::deque& Data() const { return data; } + size_t size() const { return data.size(); } + const_iterator begin() const { return data.begin(); } + const_iterator end() const { return data.end(); } + + void Fill(const NumberType& x, const NumberType& y) + { + data.push_back(Value(x, y)); + } + + virtual void WriteRootObject() + { + if(!GetOutputDirectory()) return; + std::unique_ptr rootTree(new TTree(Name().c_str(), Name().c_str())); + rootTree->SetDirectory(GetOutputDirectory()); + NumberType branch_value_x, branch_value_y; + rootTree->Branch("x", &branch_value_x); + rootTree->Branch("y", &branch_value_y); + for(const Value& value : data) { + branch_value_x = value.x; + branch_value_y = value.y; + rootTree->Fill(); + } + root_ext::WriteObject(*rootTree); + } + + void CopyContent(TTree& rootTree) + { + data.clear(); + NumberType branch_value_x, branch_value_y; + TBranch *branch_x, *branch_y; + rootTree.SetBranchAddress("x", &branch_value_x, &branch_x); + 
rootTree.SetBranchAddress("y", &branch_value_y, &branch_y); + Long64_t N = rootTree.GetEntries(); + for(Long64_t n = 0; n < N; ++n) { + rootTree.GetEntry(n); + data.push_back(Value(branch_value_x, branch_value_y)); + } + rootTree.ResetBranchAddress(branch_x); + rootTree.ResetBranchAddress(branch_y); + } + +private: + std::deque data; +}; + +} // namespace detail + +template +class SmartHistogram; + +template<> +class SmartHistogram : public detail::Base1DHistogram { +public: + SmartHistogram(const std::string& name) : Base1DHistogram(name) {} +}; + +template<> +class SmartHistogram : public detail::Base1DHistogram { +public: + SmartHistogram(const std::string& name) : Base1DHistogram(name) {} +}; + +template<> +class SmartHistogram : public detail::Base1DHistogram { +public: + SmartHistogram(const std::string& name) : Base1DHistogram(name) {} +}; + +template<> +class SmartHistogram : public detail::Base1DHistogram { +public: + SmartHistogram(const std::string& name) : Base1DHistogram(name) {} +}; + +template<> +class SmartHistogram : public detail::Base1DHistogram { +public: + SmartHistogram(const std::string& name) : Base1DHistogram(name) {} +}; + +template<> +class SmartHistogram< detail::Base2DHistogram::Value > : public detail::Base2DHistogram { +public: + SmartHistogram(const std::string& name) : Base2DHistogram(name) {} +}; + +template<> +class SmartHistogram< detail::Base2DHistogram::Value > : public detail::Base2DHistogram { +public: + SmartHistogram(const std::string& name) : Base2DHistogram(name) {} +}; + +template<> +class SmartHistogram< detail::Base2DHistogram::Value > : public detail::Base2DHistogram { +public: + SmartHistogram(const std::string& name) : Base2DHistogram(name) {} +}; + +template<> +class SmartHistogram< detail::Base2DHistogram::Value > : public detail::Base2DHistogram { +public: + SmartHistogram(const std::string& name) : Base2DHistogram(name) {} +}; + +template<> +class SmartHistogram : public TH1D, public AbstractHistogram { +public: + using RootContainer = TH1D; + using Range = ::analysis::Range; + using MultiRange = ::analysis::MultiRange; + + SmartHistogram(const std::string& name, int nbins, double low, double high) + : TH1D(name.c_str(), name.c_str(), nbins, low, high), AbstractHistogram(name), store(true), + use_log_y(false), max_y_sf(1), divide_by_bin_width(false) {} + + SmartHistogram(const std::string& name, const std::vector& bins) + : TH1D(name.c_str(), name.c_str(), static_cast(bins.size()) - 1, bins.data()), AbstractHistogram(name), + store(true), use_log_y(false), max_y_sf(1), divide_by_bin_width(false) {} + + SmartHistogram(const std::string& name, int nbins, double low, double high, const std::string& x_axis_title, + const std::string& y_axis_title, bool _use_log_y, double _max_y_sf, bool _divide_by_bin_width, + bool _store) + : TH1D(name.c_str(), name.c_str(), nbins, low, high), AbstractHistogram(name), store(_store), + use_log_y(_use_log_y), max_y_sf(_max_y_sf), divide_by_bin_width(_divide_by_bin_width) + { + SetXTitle(x_axis_title.c_str()); + SetYTitle(y_axis_title.c_str()); + } + + SmartHistogram(const std::string& name, const std::vector& bins, const std::string& x_axis_title, + const std::string& y_axis_title, bool _use_log_y, double _max_y_sf, bool _divide_by_bin_width, + bool _store) + : TH1D(name.c_str(), name.c_str(), static_cast(bins.size()) - 1, bins.data()), AbstractHistogram(name), + store(_store), use_log_y(_use_log_y), max_y_sf(_max_y_sf), divide_by_bin_width(_divide_by_bin_width) + { + SetXTitle(x_axis_title.c_str()); + 
SetYTitle(y_axis_title.c_str()); + } + + SmartHistogram(const TH1D& other, bool _use_log_y, double _max_y_sf, bool _divide_by_bin_width) + : TH1D(other), AbstractHistogram(other.GetName()), store(false), use_log_y(_use_log_y), max_y_sf(_max_y_sf), + divide_by_bin_width(_divide_by_bin_width) {} + + virtual void SetName(const char* _name) override + { + TH1D::SetName(_name); + AbstractHistogram::SetName(_name); + } + + virtual void SetName(const std::string& _name) override + { + TH1D::SetName(_name.c_str()); + AbstractHistogram::SetName(_name); + } + + virtual void WriteRootObject() override + { + if(store && GetOutputDirectory()) + root_ext::WriteObject(*this); + } + + virtual void SetOutputDirectory(TDirectory* directory) override + { + TDirectory* dir = store ? directory : nullptr; + AbstractHistogram::SetOutputDirectory(dir); + SetDirectory(dir); + } + + bool UseLogX() const { return use_log_x; } + bool UseLogY() const { return use_log_y; } + double MaxYDrawScaleFactor() const { return max_y_sf; } + double MinYDrawScaleFactor() const { return min_y_sf; } + std::string GetXTitle() const { return GetXaxis()->GetTitle(); } + std::string GetYTitle() const { return GetYaxis()->GetTitle(); } + bool NeedToDivideByBinWidth() const { return divide_by_bin_width; } + void SetLegendTitle(const std::string _legend_title) { legend_title = _legend_title; } + const std::string& GetLegendTitle() const { return legend_title; } + const MultiRange GetBlindRanges() const { return blind_ranges; } + + bool TryGetMinY(double& _y_min) const + { + if(!y_min) return false; + _y_min = *y_min; + return true; + } + + double GetSystematicUncertainty() const { return syst_unc; } + void SetSystematicUncertainty(double _syst_unc) { syst_unc = _syst_unc; } + double GetPostfitScaleFactor() const { return postfit_sf; } + void SetPostfitScaleFactor(double _postfit_sf) { postfit_sf = _postfit_sf; } + + void CopyContent(const TH1& other) + { + if(other.GetNbinsX() != GetNbinsX()) + throw analysis::exception("Unable to copy histogram content: source and destination have different number" + " of bins."); + for(Int_t n = 0; n <= other.GetNbinsX() + 1; ++n) { + if(GetBinLowEdge(n) != other.GetBinLowEdge(n) || GetBinWidth(n) != other.GetBinWidth(n)) + throw analysis::exception("Unable to copy histogram content from histogram '%1%' into '%2%':" + " bin %3% is not compatible between the source and destination." 
+ " (LowEdge, Width): (%4%, %5%) != (%6%, %7%).") + % other.GetName() % Name() % n % other.GetBinLowEdge(n) % other.GetBinWidth(n) % GetBinLowEdge(n) + % GetBinWidth(n); + SetBinContent(n, other.GetBinContent(n)); + SetBinError(n, other.GetBinError(n)); + } + } + + void AddHistogram(const SmartHistogram& other) + { + const double integral = Integral(), other_integral = other.Integral(), tot_integral = integral + other_integral; + const double post_integral = postfit_sf * integral, other_post_integral = other.postfit_sf * other_integral, + tot_post_integral = post_integral + other_post_integral; + if(tot_integral != 0) { + postfit_sf = tot_post_integral / tot_integral; + syst_unc = std::hypot(syst_unc * post_integral, other.syst_unc * other_post_integral) / tot_post_integral; + } + Add(&other, 1); + } + +private: + bool store{true}; + bool use_log_x{false}, use_log_y{false}; + double max_y_sf{1}, min_y_sf{1}; + boost::optional y_min; + bool divide_by_bin_width{false}; + std::string legend_title; + MultiRange blind_ranges; + double syst_unc{0}, postfit_sf{1}; +}; + +template<> +class SmartHistogram : public TH2D, public AbstractHistogram { +public: + using RootContainer = TH2D; + + SmartHistogram(const std::string& name, + int nbinsx, double xlow, double xup, + int nbinsy, double ylow, double yup) + : TH2D(name.c_str(), name.c_str(), nbinsx, xlow, xup, nbinsy, ylow, yup), + AbstractHistogram(name), store(true), use_log_y(false), max_y_sf(1) {} + + SmartHistogram(const std::string& name, int nbinsx, double xlow, double xup, int nbinsy, double ylow, + double yup, const std::string& x_axis_title, const std::string& y_axis_title, bool _use_log_y, + double _max_y_sf, bool _store) + : TH2D(name.c_str(), name.c_str(), nbinsx, xlow, xup, nbinsy, ylow, yup), + AbstractHistogram(name), store(_store), use_log_y(_use_log_y), max_y_sf(_max_y_sf) + { + SetXTitle(x_axis_title.c_str()); + SetYTitle(y_axis_title.c_str()); + } + + SmartHistogram(const std::string& name, const std::vector& binsx, const std::vector& binsy) + : TH2D(name.c_str(), name.c_str(), static_cast(binsx.size()) - 1, binsx.data(), + static_cast(binsy.size()) - 1, binsy.data()), AbstractHistogram(name), + store(true), use_log_y(false), max_y_sf(1) {} + + virtual void WriteRootObject() override + { + if(store && GetOutputDirectory()) + root_ext::WriteObject(*this); + } + + virtual void SetName(const char* _name) override + { + TH2D::SetName(_name); + AbstractHistogram::SetName(_name); + } + + virtual void SetName(const std::string& _name) override + { + TH2D::SetName(_name.c_str()); + AbstractHistogram::SetName(_name); + } + + virtual void SetOutputDirectory(TDirectory* directory) override + { + TDirectory* dir = store ? 
directory : nullptr; + AbstractHistogram::SetOutputDirectory(dir); + SetDirectory(dir); + } + + bool UseLogY() const { return use_log_y; } + double MaxYDrawScaleFactor() const { return max_y_sf; } + std::string GetXTitle() const { return GetXaxis()->GetTitle(); } + std::string GetYTitle() const { return GetYaxis()->GetTitle(); } + + void CopyContent(const TH2D& other) + { + if(other.GetNbinsX() != GetNbinsX() || other.GetNbinsY() != GetNbinsY()) + throw analysis::exception("Unable to copy histogram content: source and destination have different number" + " of bins."); + for(Int_t n = 0; n <= GetNbinsX() + 1; ++n) { + for(Int_t k = 0; k <= GetNbinsY() + 1; ++k) { + if(GetXaxis()->GetBinLowEdge(n) != other.GetXaxis()->GetBinLowEdge(n) + || GetXaxis()->GetBinWidth(n) != other.GetXaxis()->GetBinWidth(n) + || GetYaxis()->GetBinLowEdge(k) != other.GetYaxis()->GetBinLowEdge(k) + || GetYaxis()->GetBinWidth(k) != other.GetYaxis()->GetBinWidth(k)) + throw analysis::exception("Unable to copy histogram content: bin (%1%, %2% is not compatible between" + " the source and destination.") % n % k; + SetBinContent(n, k, other.GetBinContent(n, k)); + SetBinError(n, k, other.GetBinError(n, k)); + } + } + } + +private: + bool store; + bool use_log_y; + double max_y_sf; +}; + +template<> +class SmartHistogram : public AbstractHistogram { +public: + using DataVector = std::vector; + using RootContainer = TGraph; + using AbstractHistogram::AbstractHistogram; + + + void AddPoint(double x, double y) + { + x_vector.push_back(x); + y_vector.push_back(y); + } + + const DataVector& GetXvalues() const { return x_vector; } + const DataVector& GetYvalues() const { return y_vector; } + + virtual void WriteRootObject() override + { + std::unique_ptr graph(new TGraph(static_cast(x_vector.size()), x_vector.data(), y_vector.data())); + if(GetOutputDirectory()) + root_ext::WriteObject(*graph, GetOutputDirectory(), Name()); + } + +private: + DataVector x_vector, y_vector; +}; + +} // root_ext diff --git a/Common/interface/SmartTree.h b/Common/interface/SmartTree.h index 72b668eb1d1..bcae2bc6558 100644 --- a/Common/interface/SmartTree.h +++ b/Common/interface/SmartTree.h @@ -256,8 +256,10 @@ class SmartTree { void Write() { std::lock_guard lock(mutex); - if(directory) + if(directory) { + tree->FlushBaskets(); directory->WriteTObject(tree, tree->GetName(), "Overwrite"); + } } Mutex& GetMutex() { return mutex; } diff --git a/Common/interface/TauIdResults.h b/Common/interface/TauIdResults.h index e2bd590a73a..eb83bf33a46 100644 --- a/Common/interface/TauIdResults.h +++ b/Common/interface/TauIdResults.h @@ -44,29 +44,19 @@ struct TauIdResults { #define TAU_IDS() \ TAU_ID(againstElectronMVA6, "againstElectron{wp}MVA6{Raw}", true, "VLoose Loose Medium Tight VTight") \ - TAU_ID(againstElectronMVA62018, "againstElectron{wp}MVA6{Raw}2018", true, "VLoose Loose Medium Tight VTight") \ TAU_ID(againstMuon3, "againstMuon{wp}3", false, "Loose Tight") \ TAU_ID(byCombinedIsolationDeltaBetaCorr3Hits, "by{wp}CombinedIsolationDeltaBetaCorr{Raw}3Hits", true, \ "Loose Medium Tight") \ - TAU_ID(byIsolationMVArun2v1DBoldDMwLT2016, "by{wp}IsolationMVArun2v1DBoldDMwLT{raw}2016", true, \ - "VLoose Loose Medium Tight VTight VVTight") \ - TAU_ID(byIsolationMVArun2v1DBnewDMwLT2016, "by{wp}IsolationMVArun2v1DBnewDMwLT{raw}2016", true, \ - "VLoose Loose Medium Tight VTight VVTight") \ TAU_ID(byIsolationMVArun2017v2DBoldDMwLT2017, "by{wp}IsolationMVArun2017v2DBoldDMwLT{raw}2017", true, \ "VVLoose VLoose Loose Medium Tight VTight VVTight") \ - 
TAU_ID(byIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "by{wp}IsolationMVArun2017v2DBoldDMdR0p3wLT{raw}2017", true, \ - "VVLoose VLoose Loose Medium Tight VTight VVTight") \ - TAU_ID(byIsolationMVArun2017v2DBnewDMwLT2017, "by{wp}IsolationMVArun2017v2DBnewDMwLT{raw}2017", true, \ - "VVLoose VLoose Loose Medium Tight VTight VVTight") \ - TAU_ID(byDeepTau2017v2VSe, "by{wp}DeepTau2017v2VSe{raw}", true, \ + TAU_ID(byDeepTau2017v2p1VSe, "by{wp}DeepTau2017v2p1VSe{raw}", true, \ "VVVLoose VVLoose VLoose Loose Medium Tight VTight VVTight") \ - TAU_ID(byDeepTau2017v2VSmu, "by{wp}DeepTau2017v2VSmu{raw}", true, \ + TAU_ID(byDeepTau2017v2p1VSmu, "by{wp}DeepTau2017v2p1VSmu{raw}", true, \ "VLoose Loose Medium Tight") \ - TAU_ID(byDeepTau2017v2VSjet, "by{wp}DeepTau2017v2VSjet{raw}", true, \ + TAU_ID(byDeepTau2017v2p1VSjet, "by{wp}DeepTau2017v2p1VSjet{raw}", true, \ "VVVLoose VVLoose VLoose Loose Medium Tight VTight VVTight") \ /**/ - #define TAU_ID(name, pattern, has_raw, wp_list) name, enum class TauIdDiscriminator { TAU_IDS() }; #undef TAU_ID @@ -129,12 +119,14 @@ struct TauIdDescriptor { const std::string& raw_suffix = "raw") const { const std::string disc_name = ::analysis::ToString(discriminator); - if(has_raw) - tuple.template get(prefix + disc_name + raw_suffix) = tau ? tau->tauID(raw_name) : default_value; + if(has_raw) { + const float value = tau && tau->isTauIDAvailable(raw_name) ? tau->tauID(raw_name) : default_value; + tuple.template get(prefix + disc_name + raw_suffix) = value; + } if(!working_points.empty()) { TauIdResults id_results; for(const auto& wp_entry : working_points) { - const bool result = tau && tau->tauID(wp_entry.second) > 0.5; + const bool result = tau && tau->isTauIDAvailable(wp_entry.second) && tau->tauID(wp_entry.second) > 0.5f; id_results.SetResult(wp_entry.first, result); } tuple.template get(prefix + disc_name) = id_results.GetResultBits(); diff --git a/Common/interface/TextIO.h b/Common/interface/TextIO.h index 5a918d697c2..2284b9bed9a 100644 --- a/Common/interface/TextIO.h +++ b/Common/interface/TextIO.h @@ -8,6 +8,7 @@ This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ #include #include #include + namespace analysis { namespace detail { @@ -85,140 +86,48 @@ std::string CollectionToString(const Collection& col, const std::string& separat return ss.str(); } -inline std::string RemoveFileExtension(const std::string& file_name) -{ - return boost::filesystem::change_extension(file_name, "").string(); -} +std::string RemoveFileExtension(const std::string& file_name); +std::string GetFileNameWithoutPath(const std::string& file_name); -inline std::string GetFileNameWithoutPath(const std::string& file_name) -{ - const size_t lastindex = file_name.find_last_of("/"); - if(lastindex == std::string::npos) - return file_name; - else - return file_name.substr(lastindex+1); -} +std::vector SplitValueList(const std::string& values_str, bool allow_duplicates = true, + const std::string& separators = " \t", + bool enable_token_compress = true); -inline std::vector SplitValueList(std::string values_str, bool allow_duplicates = true, - const std::string& separators = " \t", - bool enable_token_compress = true) +template> +Collection SplitValueListT(const std::string& values_str, bool allow_duplicates = true, + const std::string& separators = " \t", + bool enable_token_compress = true) { - std::vector result; - if(enable_token_compress) - boost::trim_if(values_str, boost::is_any_of(separators)); - if(!values_str.size()) return result; - const auto token_compress = enable_token_compress ? boost::algorithm::token_compress_on - : boost::algorithm::token_compress_off; - boost::split(result, values_str, boost::is_any_of(separators), token_compress); - if(!allow_duplicates) { - std::unordered_set set_result; - for(const std::string& value : result) { - if(set_result.count(value)) - throw exception("Value '%1%' listed more than once in the value list '%2%'.") % value % values_str; - set_result.insert(value); - } - } - return result; + std::vector list = SplitValueList(values_str,allow_duplicates,separators,enable_token_compress); + Collection collection; + std::transform(list.begin(), list.end(), std::inserter(collection, collection.end()), [](const std::string& str) { return Parse(str);}); + return collection; } -inline std::vector ReadValueList(std::istream& stream, size_t number_of_items, - bool allow_duplicates = true, - const std::string& separators = " \t", - bool enable_token_compress = true) -{ - const auto stream_exceptions = stream.exceptions(); - stream.exceptions(std::istream::goodbit); - try { - std::vector result; - std::unordered_set set_result; - const auto predicate = boost::is_any_of(separators); - size_t n = 0; - for(; n < number_of_items; ++n) { - std::string value; - while(true) { - const auto c = stream.get(); - if(!stream.good()) { - if(stream.eof()) break; - throw exception("Failed to read values from stream."); - } - if(predicate(c)) { - if(!value.size() && enable_token_compress) continue; - break; - } - value.push_back(static_cast(c)); - } - if(!allow_duplicates && set_result.count(value)) - throw exception("Value '%1%' listed more than once in the input stream.") % value; - result.push_back(value); - set_result.insert(value); - } - if(n != number_of_items) - throw exception("Expected %1% items, while read only %2%.") % number_of_items % n; - - stream.clear(); - stream.exceptions(stream_exceptions); - return result; - } catch(exception&) { - stream.clear(); - stream.exceptions(stream_exceptions); - throw; - } -} +std::vector ReadValueList(std::istream& stream, size_t number_of_items, + bool allow_duplicates = true, + const std::string& separators = " 
\t", + bool enable_token_compress = true); struct StVariable { using ValueType = double; static constexpr int max_precision = -std::numeric_limits::digits10; static constexpr int number_of_significant_digits_in_error = 2; - ValueType value, error_up, error_low; + ValueType value, error_up, error_low; - StVariable() : value(0), error_up(0), error_low(0) {} - StVariable(double _value, double _error_up, double _error_low = std::numeric_limits::quiet_NaN()) : - value(_value), error_up(_error_up), error_low(_error_low) {} + StVariable(); + StVariable(double _value, double _error_up, double _error_low = std::numeric_limits::quiet_NaN()); - int precision_up() const - { - return error_up != 0. - ? static_cast(std::floor(std::log10(error_up)) - number_of_significant_digits_in_error + 1) - : max_precision; - } + int precision_up() const; + int precision_low() const; + int precision() const; - int precision_low() const - { - return error_low != 0. - ? static_cast(std::floor(std::log10(error_low)) - number_of_significant_digits_in_error + 1) - : max_precision; - } + int decimals_to_print_low() const; + int decimals_to_print_up() const; + int decimals_to_print() const; - int precision() const { return std::max(precision_up(), precision_low()); } - - int decimals_to_print_low() const { return std::max(0, -precision_low()); } - int decimals_to_print_up() const { return std::max(0, -precision_up()); } - int decimals_to_print() const { return std::min(decimals_to_print_low(), decimals_to_print_up()); } - - std::string ToLatexString() const - { - const ValueType ten_pow_p = std::pow(10.0, precision()); - const ValueType value_rounded = std::round(value / ten_pow_p) * ten_pow_p; - const ValueType error_up_rounded = std::ceil(error_up / ten_pow_p) * ten_pow_p; - const ValueType error_low_rounded = std::ceil(error_low / ten_pow_p) * ten_pow_p; - - std::ostringstream ss; - ss << std::setprecision(decimals_to_print()) << std::fixed; - if(error_up == 0 && error_low == 0) - ss << value_rounded<< "^{+0}_{-0}"; - else if(!std::isnan(error_low)) - ss << value_rounded<< "^{+" << error_up_rounded << "}_{-" << error_low_rounded << "}"; - else if(std::isnan(error_low)) { - ss << value_rounded << " \\pm "; - if(error_up == 0) - ss << "0"; - else - ss << error_up_rounded; - } - - return ss.str(); - } + std::string ToLatexString() const; }; } // namespace analysis diff --git a/Common/interface/Tools.h b/Common/interface/Tools.h new file mode 100644 index 00000000000..ba4e44e830d --- /dev/null +++ b/Common/interface/Tools.h @@ -0,0 +1,126 @@ +/*! Common tools and definitions suitable for general purposes. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +namespace analysis { + +namespace tools { + +template +std::vector join_vectors(const std::vector< const std::vector* >& inputVectors) +{ + size_t totalSize = 0; + for(auto inputVector : inputVectors) { + if(!inputVector) + throw std::runtime_error("input vector is nullptr"); + totalSize += inputVector->size(); + } + + std::vector result; + result.reserve(totalSize); + for(auto inputVector : inputVectors) + result.insert(result.end(), inputVector->begin(), inputVector->end()); + + return result; +} + +template +void put_back(std::vector& /*v*/) { } + +template +void put_back(std::vector& v, const T2& t, const Args&... args); + +template +void put_back(std::vector& v, const std::vector& v2, const Args&... 
args) +{ + v.insert(v.end(), v2.begin(), v2.end()); + put_back(v, args...); +} + +template +void put_back(std::vector& v, const T2& t, const Args&... args) +{ + v.push_back(t); + put_back(v, args...); +} + +template +std::vector join(const Type& t, const Args&... args) +{ + std::vector result; + put_back(result, t, args...); + return result; +} + +template +std::vector join(const std::vector& v, const Args&... args) +{ + std::vector result; + put_back(result, v, args...); + return result; +} + +template +std::set union_sets(std::initializer_list> sets) +{ + std::set result; + for(const auto& set : sets) + result.insert(set.begin(), set.end()); + return result; +} + +template +size_t find_index(const Container& container, const T& value) +{ + const auto iter = std::find(container.begin(), container.end(), value); + return std::distance(container.begin(), iter); +} + +template> +Set collect_map_keys(const Map& map) +{ + Set result; + std::transform(map.begin(), map.end(), std::inserter(result, result.end()), + [](const typename Map::value_type& pair) { return pair.first; } ); + return result; +} + +template> +Set collect_map_values(const Map& map) +{ + Set result; + std::transform(map.begin(), map.end(), std::inserter(result, result.end()), + [](const typename Map::value_type& pair) { return pair.second; } ); + return result; +} + +inline uint32_t hash(const std::string& str) +{ + boost::crc_32_type crc; + crc.process_bytes(str.data(), str.size()); + return crc.checksum(); +} + +inline std::string FullPath(std::initializer_list paths) +{ + if(!paths.size()) + return ""; + + std::ostringstream full_path; + auto iter = paths.begin(); + full_path << *iter++; + for(; iter != paths.end(); ++iter) + full_path << "/" << *iter; + return full_path.str(); +} + +} // namespace tools +} // namespace analysis \ No newline at end of file diff --git a/Common/interface/TriggerDescriptor.h b/Common/interface/TriggerDescriptor.h new file mode 100644 index 00000000000..72bab57cb09 --- /dev/null +++ b/Common/interface/TriggerDescriptor.h @@ -0,0 +1,77 @@ +/*! Definition of trigger results. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ + +#pragma once + +#include +#include + +#include "DataFormats/L1Trigger/interface/Tau.h" +#include "DataFormats/PatCandidates/interface/TriggerObjectStandAlone.h" +#include "FWCore/ParameterSet/interface/ParameterSet.h" + +#include "AnalysisTypes.h" + +namespace tau_trigger { + +struct TriggerLeg { + analysis::LegType type; + std::vector filters; +}; + +struct TriggerDescriptor { + std::string path; + boost::regex regex; + int global_index{-1}; + std::vector legs; + bool is_tag{false}; + unsigned type_mask{0}; +}; + +unsigned GetTriggerObjectTypes(const pat::TriggerObjectStandAlone& triggerObject); +const l1t::Tau* MatchL1Taus(const analysis::LorentzVectorM& ref_p4, const BXVector& l1Taus, double deltaR2Thr, + int bx_value); + + +using TriggerBitsContainer = unsigned long long; +constexpr size_t MaxNumberOfTriggers = std::numeric_limits::digits; +using TriggerResults = std::bitset; + +struct TriggerObjectMatchResult { + size_t hltObjIndex; + unsigned objType; + + TriggerResults hasPathName, isBestMatch; + std::set descIndices; + std::vector filters; +}; + +struct FullTriggerResults { + TriggerResults accept, acceptAndMatch; + std::map matchResults; +}; + +class TriggerDescriptorCollection { +public: + TriggerDescriptorCollection(const edm::VParameterSet& trig_pset); + const std::vector& getDescriptors() const { return descs; } + const TriggerDescriptor& at(size_t n) const { return descs.at(n); } + size_t getIndex(const std::string& path) const { return desc_indices.at(path); } + size_t size() const { return descs.size(); } + const std::set& getTagDescriptorsIndices() const { return tag_desc_indices; } + + FullTriggerResults matchTriggerObjects(const edm::TriggerResults& triggerResults, + const pat::TriggerObjectStandAloneCollection& triggerObjects, + const analysis::LorentzVectorM& ref_p4, + const std::vector& triggerNames, double deltaR2Thr, + bool include_tag_paths, bool include_nontag_paths); + + void updateGlobalIndices(const std::vector& triggerNames); + +private: + std::vector descs; + std::map desc_indices; + std::set tag_desc_indices; +}; + +} diff --git a/Common/interface/exception.h b/Common/interface/exception.h index e2da3871b93..8bee88db280 100644 --- a/Common/interface/exception.h +++ b/Common/interface/exception.h @@ -3,49 +3,20 @@ This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ #pragma once -#include -#include +#include #include +#include namespace analysis { class exception : public std::exception { public: - explicit exception(const std::string& message) noexcept : msg_valid(false), f_str(message) - { - try { - f_msg = std::make_unique(f_str); - f_msg->exceptions(boost::io::all_error_bits); - } catch(boost::io::bad_format_string&) { - msg = "bad formatted error message = '" + f_str + "'."; - msg_valid = true; - } - } - - exception(const exception& e) noexcept : msg(e.msg), msg_valid(e.msg_valid), f_str(e.f_str) - { - if(e.f_msg) - f_msg = std::make_unique(*e.f_msg); - } - - exception(exception&& e) noexcept : msg(e.msg), msg_valid(e.msg_valid), f_msg(std::move(e.f_msg)), f_str(e.f_str) {} + explicit exception(const std::string& message) noexcept; + exception(const exception& e) noexcept; + exception(exception&& e) noexcept; virtual ~exception() noexcept override {} - virtual const char* what() const noexcept override { return message().c_str(); } - - const std::string& message() const noexcept - { - if(!msg_valid) { - try { - msg = boost::str(*f_msg); - } catch(boost::io::too_few_args&) { - msg = "too few arguments are provided to the error message = '" + f_str + "'."; - } catch(std::exception& e) { - process_unexpected_exception(e); - } - msg_valid = true; - } - return msg; - } + virtual const char* what() const noexcept override; + const std::string& message() const noexcept; template exception& operator % (const T& t) noexcept @@ -63,12 +34,7 @@ class exception : public std::exception { } private: - void process_unexpected_exception(const std::exception& e) const - { - msg = "An exception has been raised while creating an error message. Error message = '" + f_str + - "'. Exception message = '" + e.what() + "'."; - msg_valid = true; - } + void process_unexpected_exception(const std::exception& e) const; private: mutable std::string msg; diff --git a/Common/python/AnalysisTools.py b/Common/python/AnalysisTools.py new file mode 100644 index 00000000000..ac20c02e7bc --- /dev/null +++ b/Common/python/AnalysisTools.py @@ -0,0 +1,396 @@ +import math +import numpy as np +import scipy +import scipy.optimize +import ROOT + +from RootObjects import Histogram, Graph, MultiGraph + +def KatzLog(passed, total): + """Returns 1-sigma confidence interval for a ratio of proportions using Katz-log method.""" + if np.count_nonzero(total) != len(total): + raise RuntimeError("Total can't be zero") + if np.count_nonzero(passed < 0) != 0 or np.count_nonzero(total < 0) != 0: + raise RuntimeError("Yields can't be negative") + if np.count_nonzero(passed > total) != 0: + raise RuntimeError("Passed can't be bigger than total") + if passed[0] == 0 and passed[1] == 0: + return (0, math.inf) + if passed[0] == total[0] and passed[1] == total[1]: + y1 = total[0] - 0.5 if total[0] > 0.5 else total[0] * 0.99 + y2 = total[1] - 0.5 if total[1] > 0.5 else total[1] * 0.99 + # in some sources -1 instead of -0.5 is recommended for y2 + else: + y1 = passed[0] if passed[0] != 0 else 0.5 + y2 = passed[1] if passed[1] != 0 else 0.5 + n1 = total[0] + n2 = total[1] + pi1 = y1 / n1 + pi2 = y2 / n2 + theta = pi1 / pi2 + sigma2 = (1 - pi1) / (pi1 * n1) + (1 - pi2) / (pi2 * n2) + if sigma2 < 0: + raise RuntimeError("Invalid inputs: passed={}, total={}".format(passed, total)) + sigma = math.sqrt(sigma2) + return (theta * math.exp(-sigma), theta * math.exp(sigma)) + +def data_eff_confint(n_passed, n_total, n_passed_err, n_total_err): + #print(":") + #print(" n_passed = %1.2f +/- %1.2f" % (n_passed, 
n_passed_err)) + #print(" n_total = %1.2f +/- %1.2f" % (n_total, n_total_err)) + hist_passed = ROOT.TH1D("histogram_passed", "histogram_passed", 1, -0.5, +0.5) + hist_passed.SetBinContent(1, n_passed) + hist_passed.SetBinError(1, n_passed_err) + hist_total = ROOT.TH1D("histogram_total", "histogram_total", 1, -0.5, +0.5) + hist_total.SetBinContent(1, n_total) + hist_total.SetBinError(1, n_total_err) + eff = ROOT.TEfficiency(hist_passed, hist_total) + eff.SetStatisticOption(ROOT.TEfficiency.kFWilson) + eff_low = eff.GetEfficiency(1) - eff.GetEfficiencyErrorLow(1) + eff_high = eff.GetEfficiency(1) + eff.GetEfficiencyErrorUp(1) + return eff_low, eff_high + +def weighted_eff_confint_freqMC(n_passed, n_failed, n_passed_err, n_failed_err, alpha=1-0.68, n_gen=100000, + max_gen_iters=100, min_stat=80000, seed=42, symmetric=True): + #print(":") + #print(" n_passed = %1.2f +/- %1.2f" % (n_passed, n_passed_err)) + #print(" n_failed = %1.2f +/- %1.2f" % (n_failed, n_failed_err)) + assert n_passed >= 0 + assert n_failed >= 0 + assert n_passed_err >= 0 + assert n_failed_err >= 0 + assert alpha > 0 and alpha < 1 + assert n_gen > 0 + assert max_gen_iters > 0 + assert min_stat > 0 + failed_mc = np.empty(0) + passed_mc = np.empty(0) + if seed is not None: + np.random.seed(seed) + for gen_iter in range(max_gen_iters): + new_failed_mc = np.random.normal(n_failed, n_failed_err, n_gen) + new_passed_mc = np.random.normal(n_passed, n_passed_err, n_gen) + sel = (new_failed_mc >= 0) & (new_passed_mc >= 0) + failed_mc = np.append(failed_mc, new_failed_mc[sel]) + passed_mc = np.append(passed_mc, new_passed_mc[sel]) + n_samples = len(passed_mc) + if n_samples >= min_stat: + eff = passed_mc / (passed_mc + failed_mc) + break + if n_samples < min_stat: + raise RuntimeError("Unable to estimate confinterval please, increase MC statistics.") + eff_exp = n_passed / float(n_passed + n_failed) + + if symmetric: + def coverage(delta_eff): + x = np.count_nonzero((eff > eff_exp - delta_eff) & (eff < eff_exp + delta_eff)) + return x / float(n_samples) + opt = scipy.optimize.root_scalar(lambda x: coverage(x) - 1 + alpha, bracket=(0, 1), method='bisect') + if not opt.converged: + raise RuntimeError("weighted_eff_confint_freqMC: unable to find a symmetric conf interval.") + q_down = max(0., eff_exp - opt.root) + q_up = min(1., eff_exp + opt.root) + return q_down, q_up + else: + eff_up = eff[eff > eff_exp] + eff_down = eff[eff <= eff_exp] + frac_up = len(eff_up) / float(n_samples) + frac_down = len(eff_down) / float(n_samples) + assert frac_up > 0 + assert frac_down > 0 + + def L(alpha_up, return_interal=False): + alpha_up = min(alpha, max(0, alpha_up)) + alpha_down = (alpha - alpha_up) + alpha_up_scaled = alpha_up / frac_up + alpha_down_scaled = min(1., alpha_down / frac_down) + + q_up = np.quantile(eff_up, 1 - alpha_up_scaled) if alpha_up != 0 else 1. + q_down = np.quantile(eff_down, alpha_down_scaled) if alpha_down != 0 else 0. 
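        # note (descriptive comment, added for clarity): L(alpha_up) returns the length of the
        # interval obtained when a fraction alpha_up of the total miscoverage alpha is placed
        # above the expected efficiency and the remainder below it; the bounded minimizer
        # further down then searches for the split that gives the shortest such interval.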
+ l = q_up - q_down + if return_interal: + return l, q_down, q_up + return l + + opt = scipy.optimize.minimize_scalar(L, bounds=(0, min(alpha, frac_up)), method='Bounded') + if not opt.success: + raise RuntimeError("weighted_eff_confint_freqMC: unable to find a conf interval with the minimal size.") + + _, q_down, q_up = L(opt.x, True) + return q_down, q_up + +def ListToStdVector(l, elem_type='string'): + v = ROOT.std.vector(elem_type)() + for x in l: + if elem_type in ['Int_t', 'UInt_t']: + x = int(x) + v.push_back(x) + return v + +def RemoveOverflowBins(hist): + for bin in [ 0, hist.GetNbinsX() + 1 ]: + hist.SetBinContent(bin, 0) + hist.SetBinError(bin, 0) + +def FixNegativeBins(hist, fix_integral=False, max_rel_shift=0.65): + has_fixes = False + integral = hist.Integral() + if integral <= 0: + raise RuntimeError("Unable to fix negative bins if integral <= 0.") + for n in range(hist.GetNbinsX() + 2): + x = hist.GetBinContent(n) + if x < 0: + x_err = hist.GetBinError(n) + if x + 3.*x_err < 0: + raise RuntimeError("Yield in bin {} is {} +- {}. Negative bin for which the yield is not statistically" + " compatible with 0 can't be fixed.".format(n, x, x_err)) + hist.SetBinError(n, math.sqrt(x_err ** 2 + x ** 2)) + hist.SetBinContent(n, 0) + has_fixes = True + if has_fixes: + new_integral = hist.Integral() + total_rel_shift = abs(new_integral - integral) / integral + if total_rel_shift > max_rel_shift: + print("total_rel_shift: %1.2f, max_rel_shift %1.2f" % (total_rel_shift, max_rel_shift)) + raise RuntimeError("The overal shift to the integral due to negative bins = {} is above the allowed limit" \ + " = {}.".format(total_rel_shift, max_rel_shift)) + if fix_integral: + sf = integral / new_integral + hist.Scale(sf) + +def FixEfficiencyBins(hist_passed, hist_total, remove_overflow=True): + if remove_overflow: + RemoveOverflowBins(hist_passed) + RemoveOverflowBins(hist_total) + FixNegativeBins(hist_passed) + FixNegativeBins(hist_total) + for i in range(hist_total.GetNbinsX() + 2): + if hist_passed.GetBinLowEdge(i) != hist_total.GetBinLowEdge(i): + raise ValueError("Histograms passed as function arguments have incompatible binning !!") + delta = hist_passed.GetBinContent(i) - hist_total.GetBinContent(i) + if delta > 0: + if delta > hist_passed.GetBinError(i): + print("Warning: The number of passed events = {} +/- {} is above the total number events" \ + " = {} +/- {} in bin {} [{}, {})." \ + .format(hist_passed.GetBinContent(i), hist_passed.GetBinError(i), + hist_total.GetBinContent(i), hist_total.GetBinError(i), i, + hist_total.GetBinLowEdge(i), + hist_total.GetBinLowEdge(i) + hist_total.GetBinWidth(i))) + print(" Setting bin-content of 'pass' histogram for bin #{} to {}.".format(i, hist_total.GetBinContent(i))) + hist_passed.SetBinError(i, math.sqrt(hist_passed.GetBinError(i) ** 2 + delta ** 2)) + hist_passed.SetBinContent(i, hist_total.GetBinContent(i)) + +def dumpHistogram(histName, n_bins, hist_binEdges, hist_binContents, hist_binErrors2): + if len(hist_binEdges) != (n_bins + 1) or len(hist_binContents) != n_bins or len(hist_binErrors2) != n_bins: + raise ValueError("Internal error !!") + print("histogram = %s" % histName) + print(" bin-contents = ", hist_binContents) + print(" bin-errors = ", [ math.sqrt(hist_binError2) for hist_binError2 in hist_binErrors2]) + ##print(" bin-error/bin-content = ", [ math.sqrt(hist_binErrors2[i])/hist_binContents[i] if hist_binContents[i] > 0. 
else 0.5 for i in range(n_bins) ]) + +def AutoRebinAndEfficiency(hist_passed_a, hist_total_a, hist_passed_b, hist_total_b, max_binError_div_binContent = 0.50): + + ##print(":") + + n_bins = hist_passed_a.GetNbinsX() + if hist_total_a.GetNbinsX() != n_bins or hist_passed_b.GetNbinsX() != n_bins or hist_total_b.GetNbinsX() != n_bins: + raise ValueError("Histograms passed as function arguments have incompatible binning !!") + + # CV: convert histograms from ROOT's TH1 to Konstantin's Histogram type + # (defined in TauTriggerTools/Common/python/RootObjects.py) + myhists = [ hist_passed_a, hist_total_a, hist_passed_b, hist_total_b ] + for i in range(len(myhists)): + if type(myhists[i]) != Histogram: + myhists[i] = Histogram(myhists[i]) + myhist_passed_a = myhists[0] + myhist_total_a = myhists[1] + myhist_passed_b = myhists[2] + myhist_total_b = myhists[3] + + ##print("BEFORE rebinning:") + ##print("#bins = %i" % n_bins) + ##print(" bin-edges = ", myhist_total_a.edges) + ##dumpHistogram("data, passed", n_bins, myhist_total_a.edges, myhist_passed_a.values, myhist_passed_a.errors) + ##dumpHistogram("data, total", n_bins, myhist_total_a.edges, myhist_total_a.values, myhist_total_a.errors) + ##print("efficiency (data) = ", [ myhist_passed_a.values[i]/myhist_total_a.values[i] for i in range(n_bins) ]) + ##dumpHistogram("mc, passed", n_bins, myhist_total_a.edges, myhist_passed_b.values, myhist_passed_b.errors) + ##dumpHistogram("mc, total", n_bins, myhist_total_a.edges, myhist_total_b.values, myhist_total_b.errors) + ##print("efficiency (mc) = ", [ myhist_passed_b.values[i]/myhist_total_b.values[i] for i in range(n_bins) ]) + + hists_rebinned_binContents = [ [], [], [], [] ] + hists_rebinned_binErrors2 = [ [], [], [], [] ] + hist_rebinned_binEdges = [] + n_bins_rebinned = 0 + + is_unmerged_bin = False + + # merge bins of the original histogram from left to right + # until sufficient event statistics is accumulated in each bin of each rebinned histogram + for idx_bin in range(n_bins): + + binEdge = myhist_passed_a.edges[idx_bin] + if abs(myhist_total_a.edges[idx_bin] - binEdge) > 1.e-1 or abs(myhist_passed_b.edges[idx_bin] - binEdge) > 1.e-1 or abs(myhist_total_b.edges[idx_bin] - binEdge) > 1.e-1: + raise ValueError("Histograms passed as function arguments have incompatible binning !!") + + if idx_bin == 0: + hist_rebinned_binEdges.append(myhist_total_a.edges[idx_bin]) + + is_sufficient_stats = True + for idx_hist in range(len(myhists)): + binContent = myhists[idx_hist].values[idx_bin] + if len(hists_rebinned_binContents[idx_hist]) < (n_bins_rebinned + 1): + hists_rebinned_binContents[idx_hist].append(0.) + hists_rebinned_binContents[idx_hist][n_bins_rebinned] += binContent + + binError2 = myhists[idx_hist].errors[idx_bin] ** 2 + if len(hists_rebinned_binErrors2[idx_hist]) < (n_bins_rebinned + 1): + hists_rebinned_binErrors2[idx_hist].append(0.) 
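            # note (descriptive comment, added for clarity): bin contents and squared bin errors
            # of the original histograms are accumulated into the current rebinned bin; the
            # checks that follow decide whether the accumulated statistics are sufficient to
            # close this rebinned bin or whether the next original bin must be merged in as well.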
+ hists_rebinned_binErrors2[idx_hist][n_bins_rebinned] += binError2 + + # CV: require that all rebinned histograms have non-negative bin-contents + if not hists_rebinned_binContents[idx_hist][n_bins_rebinned] >= 0.: + is_sufficient_stats = False + # CV: require that all rebinned "total" histograms have positive bin-contents + if (idx_hist == 1 or idx_hist == 3) and not hists_rebinned_binContents[idx_hist][n_bins_rebinned] > 0.: + is_sufficient_stats = False + # CV: require that relative uncertainty (= bin-error/bin-content) is below threshold for all rebinned "total" histograms + # (and bin contains at least one evemt, to make condition on bin-error/bin-content well-defined) + if hists_rebinned_binContents[idx_hist][n_bins_rebinned] > 1.: + binError_div_binContent = math.sqrt(hists_rebinned_binErrors2[idx_hist][n_bins_rebinned])/hists_rebinned_binContents[idx_hist][n_bins_rebinned] + if (idx_hist == 1 or idx_hist == 3) and (binError_div_binContent > max_binError_div_binContent): + is_sufficient_stats = False + # CV: require that relative uncertainty (= bin-error/bin-content) is below 1.5times threshold for all rebinned "passed" and "failed" histograms also + # (and bins contains at least one evemt, to make conditions on bin-error/bin-content well-defined) + if hists_rebinned_binContents[idx_hist][n_bins_rebinned] > 1.: + if (idx_hist == 0 or idx_hist == 2) and (binError_div_binContent > 1.5*max_binError_div_binContent): + is_sufficient_stats = False + if (idx_hist == 1 or idx_hist == 3) and is_sufficient_stats: + binContent_failed = hists_rebinned_binContents[idx_hist][n_bins_rebinned] - hists_rebinned_binContents[idx_hist - 1][n_bins_rebinned] + if binContent_failed > 0.: + binError2_failed = hists_rebinned_binErrors2[idx_hist][n_bins_rebinned] - hists_rebinned_binErrors2[idx_hist - 1][n_bins_rebinned] + binError_div_binContent_failed = math.sqrt(max(0., binError2_failed))/binContent_failed + if binError_div_binContent_failed > 1.5*max_binError_div_binContent: + is_sufficient_stats = False + else: + is_sufficient_stats = False + # CV: require that number of events in "passed" histogram is less than or equal to number of events in "total" histogram + if hists_rebinned_binContents[0][n_bins_rebinned] >= hists_rebinned_binContents[1][n_bins_rebinned] or \ + hists_rebinned_binContents[2][n_bins_rebinned] >= hists_rebinned_binContents[3][n_bins_rebinned]: + is_sufficient_stats = False + if is_sufficient_stats: + hist_rebinned_binEdges.append(myhist_total_a.edges[idx_bin + 1]) + n_bins_rebinned += 1 + is_unmerged_bin = False + else: + is_unmerged_bin = True + + # merge events in last two bins in case last bin does not have sufficient event statistics + if is_unmerged_bin: + if n_bins_rebinned >= 1: + # CV: merge bins of the rebinned histogram from right to left + # until sufficient event statistics is accumulated in each bin + n_bins_rebinned += 1 + keep_merging = True + while keep_merging and n_bins_rebinned >= 2: + keep_merging = False + for idx_hist in range(len(myhists)): + hists_rebinned_binContents[idx_hist][n_bins_rebinned - 2] += hists_rebinned_binContents[idx_hist][n_bins_rebinned - 1] + hists_rebinned_binContents[idx_hist].pop() + if hists_rebinned_binContents[idx_hist][n_bins_rebinned - 2] < 0.: + keep_merging = True + + hists_rebinned_binErrors2[idx_hist][n_bins_rebinned - 2] += hists_rebinned_binErrors2[idx_hist][n_bins_rebinned - 1] + hists_rebinned_binErrors2[idx_hist].pop() + hist_rebinned_binEdges.pop() + if not is_unmerged_bin: + print("Warning: Negative number of events 
encountered in the rightmost bin.") + print(" Merging the two rightmost bins...") + n_bins_rebinned -= 1 + is_unmerged_bin = False + else: + # CV: always create at least one bin, even if the event statistics in that bin is not sufficient + n_bins_rebinned = 1 + + if len(hist_rebinned_binEdges) < (n_bins_rebinned + 1): + hist_rebinned_binEdges.append(0.) + hist_rebinned_binEdges[n_bins_rebinned] = myhist_total_a.edges[n_bins] + + if n_bins_rebinned < 2: + print("Warning: Using max_binError_div_binContent = %1.2f results in a single bin !!" % max_binError_div_binContent) + print(" Fit of turn-on curve requires at least two bins.") + print(" Increasing max_binError_div_binContent parameter to %1.2f and trying again..." % (2*max_binError_div_binContent)) + return AutoRebinAndEfficiency(hist_passed_a, hist_total_a, hist_passed_b, hist_total_b, 2*max_binError_div_binContent) + + if len(hist_rebinned_binEdges) != (n_bins_rebinned + 1): + raise ValueError("Internal error !!") + ##print("AFTER rebinning:") + ##print("#bins = %i" % n_bins_rebinned) + ##print(" bin-edges = ", hist_rebinned_binEdges) + ##dumpHistogram("data, passed", n_bins_rebinned, hist_rebinned_binEdges, hists_rebinned_binContents[0], hists_rebinned_binErrors2[0]) + ##dumpHistogram("data, total", n_bins_rebinned, hist_rebinned_binEdges, hists_rebinned_binContents[1], hists_rebinned_binErrors2[1]) + ##print("efficiency (data) = ", [ hists_rebinned_binContents[0][i]/hists_rebinned_binContents[1][i] for i in range(n_bins_rebinned) ]) + ##dumpHistogram("mc, passed", n_bins_rebinned, hist_rebinned_binEdges, hists_rebinned_binContents[2], hists_rebinned_binErrors2[2]) + ##dumpHistogram("mc, total", n_bins_rebinned, hist_rebinned_binEdges, hists_rebinned_binContents[3], hists_rebinned_binErrors2[3]) + ##print("efficiency (mc) = ", [ hists_rebinned_binContents[2][i]/hists_rebinned_binContents[3][i] for i in range(n_bins_rebinned) ]) + + # compute efficiency and build graph + graphs_a = MultiGraph(3, n_bins_rebinned) + graphs_b = MultiGraph(3, n_bins_rebinned) + for idx_bin_rebinned in range(n_bins_rebinned): + binEdge_low = hist_rebinned_binEdges[idx_bin_rebinned] + binEdge_high = hist_rebinned_binEdges[idx_bin_rebinned + 1] + binCenter = 0.5*(binEdge_low + binEdge_high) + + for hist in [ "a", "b" ]: + idx_passed = None + idx_total = None + graphs = None + label = None + if hist == "a": + idx_passed = 0 + idx_total = 1 + graphs = graphs_a + label = "data" + elif hist == "b": + idx_passed = 2 + idx_total = 3 + graphs = graphs_b + label = "mc" + else: + continue + + binContent_passed = hists_rebinned_binContents[idx_passed][idx_bin_rebinned] + binError_passed = math.sqrt(max(0., hists_rebinned_binErrors2[idx_passed][idx_bin_rebinned])) + ##print("%s, passed (bin %i): bin-content = %1.2f +/- %1.2f" % (label, idx_bin_rebinned, binContent_passed, binError_passed)) + binContent_total = hists_rebinned_binContents[idx_total][idx_bin_rebinned] + binError_total = math.sqrt(max(0., hists_rebinned_binErrors2[idx_total][idx_bin_rebinned])) + ##print("%s, total (bin %i): bin-content = %1.2f +/- %1.2f" % (label, idx_bin_rebinned, binContent_total, binError_total)) + binContent_failed = max(0., hists_rebinned_binContents[idx_total][idx_bin_rebinned] - hists_rebinned_binContents[idx_passed][idx_bin_rebinned]) + binError_failed = math.sqrt(max(0., hists_rebinned_binErrors2[idx_total][idx_bin_rebinned] - hists_rebinned_binErrors2[idx_passed][idx_bin_rebinned])) + ##print("%s, failed (bin %i): bin-content = %1.2f +/- %1.2f" % (label, idx_bin_rebinned, 
binContent_failed, binError_failed)) + eff = binContent_passed / binContent_total + if hist == "a": + # data + eff_low, eff_high = data_eff_confint(binContent_passed, binContent_total, binError_passed, binError_total) + eff_low = min(eff_low, eff - 1.e-3) + eff_high = max(eff_high, eff + 1.e-3) + else: + # mc + eff_low, eff_high = weighted_eff_confint_freqMC(binContent_passed, binContent_failed, binError_passed, binError_failed) + ##print("%s, eff = %1.2f + %1.2f - %1.2f" % (label, eff, eff_high - eff, eff - eff_low)) + graphs.x[idx_bin_rebinned] = binCenter + graphs.x_error_low[idx_bin_rebinned] = binCenter - binEdge_low + graphs.x_error_high[idx_bin_rebinned] = binEdge_high - binCenter + graphs.y[0, idx_bin_rebinned] = binContent_passed + graphs.y_error_low[0, idx_bin_rebinned] = math.sqrt(hists_rebinned_binErrors2[idx_passed][idx_bin_rebinned]) + graphs.y_error_high[0, idx_bin_rebinned] = graphs_a.y_error_low[0, idx_bin_rebinned] + graphs.y[1, idx_bin_rebinned] = binContent_total + graphs.y_error_low[1, idx_bin_rebinned] = math.sqrt(hists_rebinned_binErrors2[idx_total][idx_bin_rebinned]) + graphs.y_error_high[1, idx_bin_rebinned] = graphs_a.y_error_low[1, idx_bin_rebinned] + graphs.y[2, idx_bin_rebinned] = eff + graphs.y_error_low[2, idx_bin_rebinned] = eff - eff_low + graphs.y_error_high[2, idx_bin_rebinned] = eff_high - eff + ##print("eff_data = ", [ graphs_a.y[2, i] for i in range(n_bins_rebinned) ]) + ##print("eff_mc = ", [ graphs_b.y[2, i] for i in range(n_bins_rebinned) ]) + + return tuple(graphs_a.ToRootGraphs(n_bins_rebinned)) + tuple(graphs_b.ToRootGraphs(n_bins_rebinned)) diff --git a/Common/python/AnalysisTypes.py b/Common/python/AnalysisTypes.py new file mode 100644 index 00000000000..020707451ef --- /dev/null +++ b/Common/python/AnalysisTypes.py @@ -0,0 +1,51 @@ +def EnumToString(cl, value): + keys = [k for k in dir(cl) if k[0] != '_' ] + for key in keys: + if getattr(cl, key) == value: + return key + raise RuntimeError('Value "{}" is not part of the enum "{}".'.format(value, cl.__name__)) + +def ParseEnum(cl, str_value): + keys = [k for k in dir(cl) if k[0] != '_' ] + for key in keys: + if key == str_value: + return getattr(cl, key) + raise RuntimeError('String "{}" can\'t be parsed as an element of enum "{}".'.format(str_value, cl.__name__)) + +class TauSelection: + gen = 1 + pt = 2 + MVA = 4 + DeepTau = 8 + +class Channel: + etau = 1 + mutau = 2 + ditau = 4 + +class DiscriminatorWP: + VVVLoose = 0 + VVLoose = 1 + VLoose = 2 + Loose = 3 + Medium = 4 + Tight = 5 + VTight = 6 + VVTight = 7 + VVVTight = 8 + +class Process: + data = 0 + ztt_mc = 1 + zmm_mc = 2 + w_mc = 3 + ttbar_mc = 4 + qcd = 5 + +class SideBand: + OS_low_mT = 0 + OS_high_mT = 1 + SS_low_mT = 2 + SS_high_mT = 3 + signal = 4 + w_enriched = 5 diff --git a/Common/python/ProduceHelpers.py b/Common/python/ProduceHelpers.py index 67dce827755..70dbd0459bf 100644 --- a/Common/python/ProduceHelpers.py +++ b/Common/python/ProduceHelpers.py @@ -1,3 +1,5 @@ +import re + def readFileList(fileList, inputFileName, fileNamePrefix): """read intput file list from a text file""" inputFile = open(inputFileName, 'r') @@ -12,6 +14,13 @@ def addFilesToList(fileList, inputFiles, fileNamePrefix): if len(name) > 0 and name[0] != '#': fileList.append(fileNamePrefix + name) +def getYear(period): + """returns year for the given datataking period""" + match = re.search(r'^Run([0-9]+)', period) + if match is None: + raise RuntimeError('Unable to extract year from the datataking period = "{}"'.format(period)) + return int(match.group(1)) + 
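# Illustrative usage of getYear (not part of the patch; the values follow from the regex above):
#   getYear("Run2017")    -> 2017
#   getYear("Run2018ABC") -> 2018
#   getYear("2017")       -> RuntimeError, since the period string must start with "Run"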
def getGlobalTag(period, isMC): """ Returns global tag that should be used to run tuple production with 102X release. The recommended global tag values are taken from https://twiki.cern.ch/twiki/bin/viewauth/CMS/PdmVAnalysisSummaryTable @@ -59,7 +68,7 @@ def getMetFilters(period, isMC): metFilters_common = [ "Flag_goodVertices", "Flag_globalSuperTightHalo2016Filter", "Flag_HBHENoiseFilter", "Flag_HBHENoiseIsoFilter", - "Flag_EcalDeadCellTriggerPrimitiveFilter", "Flag_BadPFMuonFilter", "ecalBadCalibReducedMINIAODFilter" + "Flag_EcalDeadCellTriggerPrimitiveFilter", "Flag_BadPFMuonFilter" ] filters = metFilters_common[:] if period in ['Run2017', 'Run2018', 'Run2018ABC', 'Run2018D']: diff --git a/Common/python/RootObjects.py b/Common/python/RootObjects.py new file mode 100644 index 00000000000..c650ecaf702 --- /dev/null +++ b/Common/python/RootObjects.py @@ -0,0 +1,78 @@ +from array import array +import numpy as np +import ROOT + +class Histogram: + def __init__(self, th1_hist): + n_bins = th1_hist.GetNbinsX() + self.values = np.zeros(n_bins) + self.errors = np.zeros(n_bins) + self.edges = np.zeros(n_bins + 1) + for i in range(n_bins): + self.values[i] = th1_hist.GetBinContent(i + 1) + self.edges[i] = th1_hist.GetXaxis().GetBinLowEdge(i + 1) + self.errors[i] = th1_hist.GetBinError(i + 1) + self.edges[n_bins] = th1_hist.GetXaxis().GetBinUpEdge(n_bins) + + @staticmethod + def CreateTH1(values, edges, errors, fixed_step=False): + if fixed_step: + th1_hist = ROOT.TH1F('', '', len(values), edges[0], edges[-1]) + else: + th1_hist = ROOT.TH1F('', '', len(edges) - 1, array('f', edges)) + for n in range(len(values)): + th1_hist.SetBinContent(n + 1, values[n]) + th1_hist.SetBinError(n + 1, errors[n]) + return th1_hist + +class Graph: + def __init__(self, **kwargs): + if 'root_graph' in kwargs: + graph = kwargs['root_graph'] + n_points = graph.GetN() + elif 'n_points' in kwargs: + graph = None + n_points = kwargs['n_points'] + else: + raise RuntimeError("Invalid arguments for Graph init") + + self.x = np.zeros(n_points) + self.x_error_low = np.zeros(n_points) + self.x_error_high = np.zeros(n_points) + self.y = np.zeros(n_points) + self.y_error_low = np.zeros(n_points) + self.y_error_high = np.zeros(n_points) + + if graph is not None: + for n in range(n_points): + self.x[n] = graph.GetX()[n] + self.x_error_low[n] = graph.GetErrorXlow(n) + self.x_error_high[n] = graph.GetErrorXhigh(n) + self.y[n] = graph.GetY()[n] + self.y_error_low[n] = graph.GetErrorYlow(n) + self.y_error_high[n] = graph.GetErrorYhigh(n) + + def ToRootGraph(self, n_active_points=None): + n_points = n_active_points if n_active_points is not None else len(self.x) + return ROOT.TGraphAsymmErrors(n_points, array('d', self.x), array('d', self.y), + array('d', self.x_error_low), array('d', self.x_error_high), + array('d', self.y_error_low), array('d', self.y_error_high)) + +class MultiGraph: + def __init__(self, n_graphs, n_points): + self.x = np.zeros(n_points) + self.x_error_low = np.zeros(n_points) + self.x_error_high = np.zeros(n_points) + self.y = np.zeros((n_graphs, n_points)) + self.y_error_low = np.zeros((n_graphs, n_points)) + self.y_error_high = np.zeros((n_graphs, n_points)) + + def ToRootGraphs(self, n_active_points=None): + n_points = n_active_points if n_active_points is not None else self.x.shape[0] + root_graphs = [] + for n in range(self.y.shape[0]): + graph = ROOT.TGraphAsymmErrors(n_points, array('d', self.x), array('d', self.y[n, :]), + array('d', self.x_error_low), array('d', self.x_error_high), + array('d', 
self.y_error_low[n, :]), array('d', self.y_error_high[n, :])) + root_graphs.append(graph) + return root_graphs diff --git a/Common/python/RootPlotting.py b/Common/python/RootPlotting.py new file mode 100644 index 00000000000..114ee69f647 --- /dev/null +++ b/Common/python/RootPlotting.py @@ -0,0 +1,302 @@ +from array import array +import math +import numpy as np +import ROOT + +from AnalysisTools import KatzLog + +class TextAlign: + LeftBottom = ROOT.kHAlignLeft + ROOT.kVAlignBottom + LeftCenter = ROOT.kHAlignLeft + ROOT.kVAlignCenter + LeftTop = ROOT.kHAlignLeft + ROOT.kVAlignTop + CenterBottom = ROOT.kHAlignCenter + ROOT.kVAlignBottom + Center = ROOT.kHAlignCenter + ROOT.kVAlignCenter + CenterTop = ROOT.kHAlignCenter + ROOT.kVAlignTop + RightBottom = ROOT.kHAlignRight + ROOT.kVAlignBottom + RightCenter = ROOT.kHAlignRight + ROOT.kVAlignCenter + RightTop = ROOT.kHAlignRight + ROOT.kVAlignTop + + + +def ApplyDefaultGlobalStyle(): + ROOT.gStyle.SetPaperSize(20, 20) + ROOT.gStyle.SetPalette(1) + ROOT.gStyle.SetEndErrorSize(0) + ROOT.gStyle.SetPadGridX(False) + ROOT.gStyle.SetPadGridY(False) + ROOT.gStyle.SetPadTickX(False) + ROOT.gStyle.SetPadTickY(False) + ROOT.gStyle.SetTickLength(0.03, "X") + ROOT.gStyle.SetTickLength(0.03, "Y") + ROOT.gStyle.SetNdivisions(510, "X") + ROOT.gStyle.SetNdivisions(510, "Y") + ROOT.gStyle.SetOptStat(0) + +def ApplyDefaultLineStyle(obj, color): + obj.SetMarkerSize(4) + obj.SetMarkerColor(color) + obj.SetLineWidth(2) + obj.SetLineColor(color) + +def ApplyAxisSetup(frame_hist, ratio_frame_hist=None, x_title="", y_title="", ratio_y_title="", + axis_title_sizes=(0.055, 0.055), axis_title_offsets=(1,1.4), axis_label_sizes=(0.04,0.04), + axis_label_offsets=(0.005,0.005), ratio_item_size_sf=2.76, max_ratio=1.5, ratio_y_title_size=0.055, + ratio_y_title_offset=0.4, ratio_y_label_size=0.04, ratio_y_label_offset=0.005, ratio_n_div_y=505, + y_range=None): + frame_hist.GetYaxis().SetTitle(y_title) + frame_hist.GetYaxis().SetTitleSize(axis_title_sizes[1]) + frame_hist.GetYaxis().SetTitleOffset(axis_title_offsets[1]) + frame_hist.GetYaxis().SetLabelSize(axis_label_sizes[1]) + frame_hist.GetYaxis().SetLabelOffset(axis_label_offsets[1]) + if y_range is not None: + frame_hist.GetYaxis().SetRangeUser(*y_range) + if ratio_frame_hist is not None: + frame_hist.GetXaxis().SetTitle("") + frame_hist.GetXaxis().SetTitleSize(0) + frame_hist.GetXaxis().SetTitleOffset(0) + frame_hist.GetXaxis().SetLabelSize(0) + frame_hist.GetXaxis().SetLabelOffset(0) + + ratio_frame_hist.GetXaxis().SetTitle(x_title) + ratio_frame_hist.GetXaxis().SetTitleSize(axis_title_sizes[0] * ratio_item_size_sf) + ratio_frame_hist.GetXaxis().SetTitleOffset(axis_title_offsets[0]) + ratio_frame_hist.GetXaxis().SetLabelSize(axis_label_sizes[0] * ratio_item_size_sf) + ratio_frame_hist.GetXaxis().SetLabelOffset(axis_label_offsets[0]) + ratio_frame_hist.GetXaxis().SetNoExponent(True) + ratio_frame_hist.GetXaxis().SetMoreLogLabels(True) + + ratio_frame_hist.GetYaxis().SetTitle(ratio_y_title) + ratio_frame_hist.GetYaxis().SetTitleSize(ratio_y_title_size * ratio_item_size_sf) + ratio_frame_hist.GetYaxis().SetTitleOffset(ratio_y_title_offset) + ratio_frame_hist.GetYaxis().SetLabelSize(ratio_y_label_size * ratio_item_size_sf) + ratio_frame_hist.GetYaxis().SetLabelOffset(ratio_y_label_offset) + ratio_frame_hist.GetYaxis().SetNdivisions(ratio_n_div_y); + if max_ratio > 0: + ratio_frame_hist.GetYaxis().SetRangeUser(max(0., 2 - max_ratio), max_ratio) + else: + frame_hist.GetXaxis().SetTitle(x_title); + 
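        # note (descriptive comment, added for clarity): no ratio pad is used in this branch,
        # so the x-axis title and labels are drawn directly on the main frame.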
frame_hist.GetXaxis().SetTitleSize(axis_title_sizes[0]) + frame_hist.GetXaxis().SetTitleOffset(axis_title_offsets[0]) + frame_hist.GetXaxis().SetLabelSize(axis_label_sizes[0]) + frame_hist.GetXaxis().SetLabelOffset(axis_label_offsets[0]) + frame_hist.GetXaxis().SetNoExponent(True) + frame_hist.GetXaxis().SetMoreLogLabels(True) + +def CreateCanvas(size_x=700, size_y=700): + canvas = ROOT.TCanvas('', '', size_x, size_y) + canvas.SetFillColor(ROOT.kWhite) + canvas.SetBorderSize(10) + canvas.SetBorderMode(0) + return canvas + +class Box: + def __init__(self, left_bottom_x, left_bottom_y, right_top_x, right_top_y): + self.left_bottom_x = left_bottom_x + self.left_bottom_y = left_bottom_y + self.right_top_x = right_top_x + self.right_top_y = right_top_y + + def __iter__(self): + return iter((self.left_bottom_x, self.left_bottom_y, self.right_top_x, self.right_top_y)) + +class MarginBox: + def __init__(self, left, bottom, right, top): + self.left = left + self.bottom = bottom + self.right = right + self.top = top + +def SetMargins(main_pad, margin_box, ratio_pad=None, ratio_pad_y_size_sf=2.76, main_ratio_margin=0.04): + main_pad.SetLeftMargin(margin_box.left) + main_pad.SetRightMargin(margin_box.right) + main_pad.SetTopMargin(margin_box.top) + if ratio_pad is not None: + ratio_pad.SetLeftMargin(margin_box.left) + ratio_pad.SetRightMargin(margin_box.right) + ratio_pad.SetBottomMargin(margin_box.bottom * ratio_pad_y_size_sf); + ratio_pad.SetTopMargin(main_ratio_margin / 2 * ratio_pad_y_size_sf); + main_pad.SetBottomMargin(main_ratio_margin / 2) + else: + main_pad.SetBottomMargin(margin_box.bottom) + +def DrawLabel(text, pos, text_size=0.05, font=42, align=TextAlign.Center, angle=0, color=ROOT.kBlack, + line_spacing=1): + x = pos[0] + y = pos[1] + alpha = math.radians(angle) + sin_alpha = math.sin(alpha) + cos_alpha = math.cos(alpha) + + label_controls = [] + for line in text.split('\n'): + latex = ROOT.TLatex(x, y, line) + latex.SetNDC() + latex.SetTextSize(text_size) + latex.SetTextFont(font) + latex.SetTextAlign(align) + latex.SetTextAngle(angle) + latex.SetTextColor(color) + latex.Draw("SAME") + label_controls.append(latex) + shift_x = 0 + shift_y = -(1 + line_spacing) * latex.GetYsize() + x += shift_x * cos_alpha + shift_y * sin_alpha + y += -shift_x * sin_alpha + shift_y * cos_alpha + return label_controls + +def CreateTwoPadLayout(canvas, ref_hist, ratio_ref_hist, main_box=Box(0.02, 0.25, 0.95, 0.94), + margins=MarginBox(0.15, 0.14, 0.03, 0.02), ratio_pad_size=0.25, log_x=False, log_y=False, + title='', title_pos=(0.5, 0.96), title_text_size=0.05, title_font=42, title_color=ROOT.kBlack): + canvas.cd() + ratio_box = Box(main_box.left_bottom_x, main_box.left_bottom_y - ratio_pad_size, + main_box.right_top_x, main_box.left_bottom_y) + main_pad = ROOT.TPad('', '', *main_box) + ratio_pad = ROOT.TPad('', '', *ratio_box) + SetMargins(main_pad, margins, ratio_pad) + + main_pad.Draw() + ratio_pad.Draw() + + main_pad.cd() + main_pad.SetLogx(log_x) + main_pad.SetLogy(log_y) + ref_hist.Draw() + ref_hist.SetTitle('') + + ratio_pad.cd() + ratio_pad.SetLogx(log_x) + ratio_ref_hist.Draw() + ratio_ref_hist.SetTitle('') + + canvas.cd() + if title is not None and len(title) > 0: + canvas.SetTitle(title) + title_controls = DrawLabel(title, pos=title_pos, text_size=title_text_size, font=title_font, + align=TextAlign.Center, color=title_color) + else: + title_controls = None + + main_pad.cd() + return main_pad, ratio_pad, title_controls + +def CreateLegend(pos=(0.18, 0.78), size=(0.2, 0.15), 
fill_color=ROOT.kWhite, fill_style=0, border_size=0, + text_size=0.04, font=42): + legend = ROOT.TLegend(pos[0], pos[1], pos[0] + size[0], pos[1] + size[1]) + legend.SetFillColor(fill_color) + legend.SetFillStyle(fill_style) + legend.SetBorderSize(border_size) + legend.SetTextSize(text_size) + legend.SetTextFont(font) + + return legend + + +def CreateEfficiencyRatioGraph(hist_passed_a, hist_total_a, hist_passed_b, hist_total_b): + n_bins = hist_passed_a.GetNbinsX() + if hist_total_a.GetNbinsX() != n_bins or hist_passed_b.GetNbinsX() != n_bins or hist_total_b.GetNbinsX() != n_bins: + raise ValueError("Histograms passed as function arguments have incompatible binning !!") + x = np.zeros(n_bins) + y = np.zeros(n_bins) + exl = np.zeros(n_bins) + exh = np.zeros(n_bins) + eyl = np.zeros(n_bins) + eyh = np.zeros(n_bins) + + k = 0 + for n in range(n_bins): + passed_a = hist_passed_a.GetBinContent(n + 1) + total_a = hist_total_a.GetBinContent(n + 1) + passed_b = hist_passed_b.GetBinContent(n + 1) + total_b = hist_total_b.GetBinContent(n + 1) + if total_a == 0 or total_b == 0 or passed_a == 0 or passed_b == 0: continue + + x[k] = hist_passed_a.GetBinCenter(n + 1) + if abs(hist_total_a.GetBinCenter(n + 1) - x[k]) > 1.e-1 or abs(hist_passed_b.GetBinCenter(n + 1) - x[k]) > 1.e-1 or abs(hist_total_b.GetBinCenter(n + 1) - x[k]) > 1.e-1: + raise ValueError("Histograms passed as function arguments have incompatible binning !!") + exl[k] = hist_passed_a.GetBinWidth(n + 1) / 2 + exh[k] = exl[k] + y_down, y_up = KatzLog(np.array([passed_a, passed_b]), np.array([total_a, total_b])) + y[k] = (passed_a * total_b) / (passed_b * total_a) + eyl[k] = y_up - y[k] + eyh[k] = y[k] - y_down + k += 1 + if k == 0: + return None + return ROOT.TGraphAsymmErrors(k, array('d', x), array('d', y), array('d', exl), array('d', exh), + array('d', eyl), array('d', eyh)) + +def GetPrintSuffix(current_page_number, total_number_of_pages): + print_suffix = '' + if total_number_of_pages > 1: + if current_page_number == 0: + print_suffix = '(' + elif current_page_number == total_number_of_pages - 1: + print_suffix = ')' + return print_suffix + +def PrintAndClear(canvas, file, title, current_page_number, total_number_of_pages, pads = []): + canvas.Print(file + GetPrintSuffix(current_page_number, total_number_of_pages), + 'Title:{}'.format(title)) + for pad in pads: + pad.Clear() + canvas.Clear() + + +def GetYRange(curves, consider_errors=True): + y_values = [] + for curve in curves: + if type(curve) == ROOT.TH1D: + for bin_id in range(1, curve.GetNbinsX() + 1): + y = curve.GetBinContent(bin_id) + if consider_errors: + y_values.append(y - curve.GetBinErrorLow(bin_id)) + y_values.append(y + curve.GetBinErrorUp(bin_id)) + else: + y_values.append(y) + elif type(curve) == ROOT.TGraphAsymmErrors: + for n in range(curve.GetN()): + y = curve.GetY()[n] + if consider_errors: + y_values.append(y - curve.GetEYlow()[n]) + y_values.append(y + curve.GetEYhigh()[n]) + else: + y_values.append(y) + else: + raise RuntimeError('GetYRange: type = "" is not supported.'.format(type(curve))) + return min(y_values), max(y_values) + +def DivideByBinWidth(hist): + for n in range(1, hist.GetNbinsX() + 1): + new_value = hist.GetBinContent(n) / hist.GetBinWidth(n); + new_bin_error = hist.GetBinError(n) / hist.GetBinWidth(n); + hist.SetBinContent(n, new_value); + hist.SetBinError(n, new_bin_error); + +def HistogramToGraph(hist): + n_bins = hist.GetNbinsX() + x = np.zeros(n_bins) + y = np.zeros(n_bins) + exl = np.zeros(n_bins) + exh = np.zeros(n_bins) + eyl = 
np.zeros(n_bins) + eyh = np.zeros(n_bins) + + k = 0 + for n in range(n_bins): + bin_y = hist.GetBinContent(n + 1) + bin_y_err_low = hist.GetBinErrorLow(n + 1) + bin_y_err_up = hist.GetBinErrorUp(n + 1) + if bin_y == 0 and bin_y_err_low == 0 and bin_y_err_up == 0: continue + + x[k] = hist.GetBinCenter(n + 1) + exl[k] = hist.GetBinWidth(n + 1) / 2 + exh[k] = exl[k] + y[k] = bin_y + eyl[k] = bin_y_err_low + eyh[k] = bin_y_err_up + k += 1 + if k == 0: + return None + + return ROOT.TGraphAsymmErrors(k, array('d', x), array('d', y), array('d', exl), array('d', exh), + array('d', eyl), array('d', eyh)) diff --git a/Common/python/TriggerConfig.py b/Common/python/TriggerConfig.py new file mode 100644 index 00000000000..3ab45c4dc27 --- /dev/null +++ b/Common/python/TriggerConfig.py @@ -0,0 +1,72 @@ +import json +import numpy as np +import re + +def Load(file_name): + with open(file_name) as f: + trig_desc = json.load(f) + channel_triggers = {} + for trig_name, desc in trig_desc.items(): + if 'target_channels' in desc: + for channel in desc['target_channels']: + if channel not in channel_triggers: + channel_triggers[channel] = [] + desc['name'] = trig_name + channel_triggers[channel].append(desc) + return trig_desc, channel_triggers + +def LoadAsVPSet(file_name): + import FWCore.ParameterSet.Config as cms + with open(file_name) as f: + trig_desc = json.load(f) + trig_vpset = cms.VPSet() + tag_path_names = [] + for trig_name, desc in trig_desc.iteritems(): + filters = [ str(','.join(path_list)) for path_list in desc['filters'] ] + is_tag = 'is_tag' in desc and desc['is_tag'] > 0 + leg_types = [ str(leg_type) for leg_type in desc['leg_types'] ] + pset = cms.PSet( + path = cms.string(str(trig_name)), + filters = cms.vstring(filters), + leg_types = cms.vstring(leg_types), + is_tag = cms.bool(is_tag) + ) + trig_vpset.append(pset) + if is_tag: + tag_path_names.append(str(trig_name)) + return trig_vpset, tag_path_names + +def _CreateDictionary(summary, key_name, value_name, name): + result_dict = {} + for entry_id in range(len(summary[key_name])): + keys = np.array(summary[key_name][entry_id]) + values = np.array(summary[value_name][entry_id]) + for n in range(len(keys)): + if keys[n] in result_dict: + if result_dict[keys[n]] != values[n]: + raise RuntimeError("Inconsistent {} information in the input ROOT files.".format(name)) + else: + result_dict[keys[n]] = values[n] + return result_dict + +def LoadTriggerDictionary(files): + import ROOT + df_support = ROOT.RDataFrame('summary', files) + summary = df_support.AsNumpy() + trigger_dict = _CreateDictionary(summary, 'trigger_pattern', 'trigger_index', 'trigger') + filter_dict = _CreateDictionary(summary, 'filter_name', 'filter_hash', 'filter') + return trigger_dict, filter_dict + +def GetMatchedTriggers(trigger_dict, pattern): + reg_ex = re.compile(pattern) + matched = {} + for name, pos in trigger_dict.items(): + if reg_ex.match(name) is not None: + matched[name] = pos + return matched + +def GetMatchMask(hlt_paths): + match_mask = 0 + for path_name, path_index in hlt_paths.items(): + match_mask = match_mask | (1 << path_index) + return match_mask diff --git a/Common/python/crabTools.py b/Common/python/crabTools.py new file mode 100644 index 00000000000..ce4eec65d73 --- /dev/null +++ b/Common/python/crabTools.py @@ -0,0 +1,101 @@ +# Definition of tools for CRAB job submission. +# This file is part of https://github.com/hh-italian-group/h-tautau. 
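# Illustrative sketch of the job file format that JobCollection below expects
# (reconstructed from the parsing code; the concrete names and paths are placeholders):
#   lumiMask=/path/to/mask.json                    <- optional first line with a default lumi mask
#   param1=value1 param2=value2                    <- line with pyCfgParams forwarded to config.JobType.pyCfgParams
#   MyJobName  /Some/Dataset/MINIAOD  [mask.json]  <- one job per line: name, input dataset, optional lumi mask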
+ +import re +from sets import Set +from CRABClient.UserUtilities import ClientException +from CRABAPI.RawCommand import crabCommand +from httplib import HTTPException + +def submit(config): + try: + crabCommand('submit', config = config) + except HTTPException as hte: + print str(hte) + print "\n{}\nERROR: failed to submit task due to HTTPException.\n{}".format(hte, hte.headers) + except ClientException as cle: + print "ERROR: failed to submit task due to ClientException.\n{}".format(cle) + +class Job: + def __init__(self, line, jobNameSuffix = ''): + items = filter(lambda s: len(s) != 0, re.split(" |\t", line)) + n_items = len(items) + if n_items < 2 or n_items > 3: + raise RuntimeError("invalid job description = '{}'.".format(line)) + self.jobName = items[0] + self.requestName = self.jobName + jobNameSuffix + self.inputDataset = items[1] + if n_items > 2: + self.lumiMask = items[2] + else: + self.lumiMask = None + + + def __str__(self): + str = "requestName = '{}', inputDataset = '{}'".format(self.requestName, self.inputDataset) + if self.lumiMask is not None: + str += ", lumiMask = '{}'".format(self.lumiMask) + return str + + def submit(self, config): + config.General.requestName = self.requestName + config.Data.inputDataset = self.inputDataset + if self.lumiMask is not None: + config.Data.lumiMask = self.lumiMask + submit(config) + +class JobCollection: + def __init__(self, file_name, job_names = '', lumi_mask = '', jobNameSuffix = ''): + self.jobs = [] + self.jobNames = job_names + input_file = open(file_name, 'r') + lines = [ s.strip() for s in input_file.readlines() ] + lines = filter(lambda s: len(s) != 0 and s[0] != '#', lines) + if len(lines) <= 1: + raise RuntimeError("file '{}' is empty".format(file_name)) + header_items = filter(lambda s: len(s) != 0, re.split(" |\n", lines[0])) + index_line = 0 + if header_items[0].startswith("lumiMask"): + index_line = 1 + lumi = filter(lambda s: len(s) != 0, re.split("=", header_items[0])) + self.lumiMask = lumi[1] + else: + self.lumiMask = '' + self.pyCfgParams = filter(lambda s: len(s) != 0, re.split(" |\t", lines[index_line])) + + if len(header_items) > 0: + if header_items[0].lower() == "signal": + if len(lines) < 4: + raise RuntimeError("invalid signal jobs definition in file '{}'".format(file_name)) + masses = filter(lambda s: len(s) != 0, re.split(" |\t", lines[2])) + template = lines[3] + for mass in masses: + line = template.format(M = mass) + self.jobs.append(Job(line)) + return + if len(lumi_mask) != 0: + self.lumiMask = lumi_mask + + index_sample = 1 + if header_items[0].startswith("lumiMask"): + index_sample = 2 + for line in lines[index_sample:]: + self.jobs.append(Job(line, jobNameSuffix)) + input_file.close() + + def __str__(self): + result = "cfgParams = {}, lumiMask = '{}'".format(self.pyCfgParams, self.lumiMask) + for job in self.jobs: + if len(self.jobNames) == 0 or job.jobName in self.jobNames: + result += "\n" + str(job) + return result + + def submit(self, config, splitting, unitsPerJob): + config.JobType.pyCfgParams = self.pyCfgParams + config.Data.unitsPerJob = unitsPerJob + config.Data.splitting = splitting + + for job in self.jobs: + if len(self.jobNames) == 0 or job.jobName in self.jobNames: + config.Data.lumiMask = self.lumiMask + job.submit(config) diff --git a/Common/scripts/crab_cmd.py b/Common/scripts/crab_cmd.py new file mode 100755 index 00000000000..774f7c89a54 --- /dev/null +++ b/Common/scripts/crab_cmd.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# Execute crab command for multiple tasks. 
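# Illustrative invocation (not part of the patch):
#   crab_cmd.py --workArea work --cmd status
# runs "crab status -d work/<task_dir>" for every task directory found under the work area;
# the "submit" command is rejected here and crab_submit.py has to be used instead.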
+ +import argparse +import os +import subprocess +import sys + +parser = argparse.ArgumentParser(description='Execute crab command for multiple tasks.', + formatter_class = lambda prog: argparse.HelpFormatter(prog,width=90)) +parser.add_argument('--workArea', required=True, type=str, help="Working area") +parser.add_argument('--cmd', required=True, type=str, help="CRAB command") +parser.add_argument('cmd_args', type=str, nargs='*', help="Arguments for the CRAB command (if any)") +args = parser.parse_args() + +if args.cmd == 'submit': + print("ERROR: Please, use crab_submit.py to run the submit command.") + sys.exit(1) +cmd_args_str = ' '.join(args.cmd_args) + +def sh_call(cmd): + sep = '-' * (len(cmd) + 3) + print('{}\n>> {}'.format(sep, cmd)) + result = subprocess.call([cmd], shell=True) + if result != 0: + print('ERROR: failed to run "{}"'.format(cmd)) + sys.exit(1) + +for dir in os.listdir(args.workArea): + task_dir = os.path.join(args.workArea, dir) + if not os.path.isdir(task_dir): continue + + cmd_line = 'crab {} -d {} {}'.format(args.cmd, task_dir, cmd_args_str) + sh_call(cmd_line) diff --git a/Common/scripts/crab_merge.py b/Common/scripts/crab_merge.py new file mode 100644 index 00000000000..2a0c6683a12 --- /dev/null +++ b/Common/scripts/crab_merge.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# Submit jobs on CRAB. +# This file is part of https://github.com/cms-tau-pog/TauTriggerTools. + +import argparse +import os +import re +import subprocess +import sys + +parser = argparse.ArgumentParser(description='Merge outputs of the finished CRAB jobs.') +parser.add_argument('crabOutput', nargs=1, type=str, help="Path with output of crab jobs") +parser.add_argument('mergedOutput', nargs=1, type=str, help="Path where to store merged outputs") +args = parser.parse_args() + +input = args.crabOutput[0] +output = args.mergedOutput[0] +if not os.path.exists(output): + os.makedirs(output) +output = os.path.abspath(output) + +merged_jobs = [] +skipped_jobs = [] +for dataset_dir in sorted(os.listdir(input)): + dataset_path = os.path.join(input, dataset_dir) + if not os.path.isdir(dataset_path): continue + for crab_name_dir in sorted(os.listdir(dataset_path)): + crab_name_path = os.path.join(dataset_path, crab_name_dir) + if not os.path.isdir(crab_name_path): continue + crab_name = re.sub(r'^crab_', '', crab_name_dir) + output_name = crab_name + '.root' + output_path = os.path.join(output, output_name) + if os.path.isfile(output_path): + print('{} already exists.'.format(output_name)) + skipped_jobs.append(crab_name) + continue + print('Merging "{}" ...'.format(output_name)) + crab_job_ids = os.listdir(crab_name_path) + if len(crab_job_ids) != 1: + raise RuntimeError('More than 1 job id for crab job {} is present.'.format(crab_name_path)) + crab_job_id_path = os.path.join(crab_name_path, crab_job_ids[0]) + root_files = [] + for file_block in os.listdir(crab_job_id_path): + file_block_path = os.path.join(crab_job_id_path, file_block) + if not os.path.isdir(file_block_path): continue + for file in os.listdir(file_block_path): + file_full_path = os.path.join(file_block_path, file) + if not os.path.isfile(file_full_path) or not file.endswith('.root'): continue + root_files.append(os.path.join(file_block, file)) + + cmd = 'cd {} ; hadd -O -ff -n 11 {} {}'.format(crab_job_id_path, output_path, ' '.join(root_files)) + result = subprocess.call([cmd], shell=True) + if result != 0: + raise RuntimeError('Failed to merge "{}" into "{}"'.format(crab_job_id_path, output_path)) + merged_jobs.append(crab_name) +if 
len(skipped_jobs):
+    print('The following jobs were skipped because the corresponding output file already exists: {}'.format(' '.join(skipped_jobs)))
+if len(merged_jobs):
+    print('The following jobs have been merged: {}'.format(' '.join(merged_jobs)))
diff --git a/Common/scripts/crab_submit.py b/Common/scripts/crab_submit.py
new file mode 100755
index 00000000000..bf60522ee7e
--- /dev/null
+++ b/Common/scripts/crab_submit.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Submit jobs on CRAB.
+# This file is part of https://github.com/cms-tau-pog/TauTriggerTools.
+
+import argparse
+import subprocess
+import sys
+
+parser = argparse.ArgumentParser(description='Submit jobs on CRAB.',
+                                 formatter_class = lambda prog: argparse.HelpFormatter(prog,width=90))
+parser.add_argument('--workArea', required=True, type=str, help="Working area")
+parser.add_argument('--cfg', required=True, type=str, help="CMSSW configuration file")
+parser.add_argument('--site', required=True, type=str, help="Site for stage out.")
+parser.add_argument('--output', required=True, type=str, help="output path after /store/user/USERNAME")
+parser.add_argument('--blacklist', required=False, type=str, default="",
+                    help="list of sites where the jobs shouldn't run")
+parser.add_argument('--whitelist', required=False, type=str, default="",
+                    help="list of sites where the jobs can run")
+parser.add_argument('--jobNames', required=False, type=str, default="",
+                    help="list of job names to submit (if not specified - submit all)")
+parser.add_argument('--lumiMask', required=False, type=str, default="",
+                    help="json file with a lumi mask (default: apply lumi mask from the config file)")
+parser.add_argument('--jobNameSuffix', required=False, type=str, default="",
+                    help="suffix that will be added to each job name")
+parser.add_argument('--inputDBS', required=False, default="global", help="DBS instance")
+parser.add_argument('--splitting', required=False, default="Automatic",
+                    help="job splitting algorithm (e.g. Automatic, FileBased, LumiBased)")
+parser.add_argument('--unitsPerJob', required=False, type=int, default=1000, help="number of units per job")
+parser.add_argument('--maxMemory', required=False, type=int, default=2000,
+                    help="maximum amount of memory (in MB) a job is allowed to use (default: 2000 MB)")
+parser.add_argument('--numCores', required=False, type=int, default=1, help="number of cores per job (default: 1)")
+parser.add_argument('--allowNonValid', action="store_true", help="Allow nonvalid dataset as an input.")
+parser.add_argument('job_file', type=str, nargs='+', help="text files with job descriptions")
+args = parser.parse_args()
+
+for job_file in args.job_file:
+    cmd = 'crab_submit_file.py --jobFile "{}"'.format(job_file)
+    for arg_name, arg_value in vars(args).iteritems():
+        if arg_name != 'job_file' and type(arg_value) != bool and (type(arg_value) != str or len(arg_value)):
+            cmd += ' --{} {} '.format(arg_name, arg_value)
+        elif type(arg_value) == bool and arg_value:
+            cmd += ' --{} '.format(arg_name)
+    print '> {}'.format(cmd)
+    result = subprocess.call([cmd], shell=True)
+    if result != 0:
+        print('ERROR: failed to submit jobs from "{}"'.format(job_file))
+        sys.exit(1)
diff --git a/Common/scripts/crab_submit_file.py b/Common/scripts/crab_submit_file.py
new file mode 100755
index 00000000000..6e5f4698d58
--- /dev/null
+++ b/Common/scripts/crab_submit_file.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# Submit jobs on CRAB.
+# This file is part of https://github.com/cms-tau-pog/TauTriggerTools.
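+#
+# crab_submit.py invokes this script once per job-description file, forwarding its own command-line
+# options; it can also be run directly, provided the CRAB3 client (CRABClient, CRABAPI) is importable
+# in the environment.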
+ +import argparse +import sys +import re +from sets import Set + +parser = argparse.ArgumentParser(description='Submit jobs on CRAB.', + formatter_class = lambda prog: argparse.HelpFormatter(prog,width=90)) +parser.add_argument('--workArea', required=True, type=str, help="Working area") +parser.add_argument('--cfg', required=True, type=str, help="CMSSW configuration file") +parser.add_argument('--site', required=True, type=str, help="Site for stage out.") +parser.add_argument('--output', required=True, type=str, help="output path after /store/user/USERNAME") +parser.add_argument('--blacklist', required=False, type=str, default="", + help="list of sites where the jobs shouldn't run") +parser.add_argument('--whitelist', required=False, type=str, default="", + help="list of sites where the jobs can run") +parser.add_argument('--jobNames', required=False, type=str, default="", + help="list of job names to submit (if not specified - submit all)") +parser.add_argument('--lumiMask', required=False, type=str, default="", + help="json file with a lumi mask (default: apply lumi mask from the config file)") +parser.add_argument('--jobNameSuffix', required=False, type=str, default="", + help="suffix that will be added to each job name") +parser.add_argument('--inputDBS', required=False, default="global", help="DBS instance") +parser.add_argument('--splitting', required=False, default="Automatic", + help="suffix that will be added to each job name") +parser.add_argument('--unitsPerJob', required=False, type=int, default=1000, help="number of units per job") +parser.add_argument('--maxMemory', required=False, type=int, default=2000, + help="maximum amount of memory (in MB) a job is allowed to use (default: 2000 MB )") +parser.add_argument('--numCores', required=False, type=int, default=1, help="number of cores per job (default: 1)") +parser.add_argument('--allowNonValid', action="store_true", help="Allow nonvalid dataset as an input.") +parser.add_argument('--jobFile', required=True, type=str, help="text file with jobs descriptions") +args = parser.parse_args() + +from CRABClient.UserUtilities import config, ClientException, getUsernameFromSiteDB +from CRABAPI.RawCommand import crabCommand +from httplib import HTTPException + +config = config() + +config.General.workArea = args.workArea + +config.JobType.pluginName = 'Analysis' +config.JobType.psetName = args.cfg +config.JobType.maxMemoryMB = args.maxMemory +config.JobType.numCores = args.numCores + +config.Data.inputDBS = args.inputDBS +config.Data.allowNonValidInputDataset = args.allowNonValid +config.General.transferOutputs = True +config.General.transferLogs = False +config.Data.publication = False + +config.Site.storageSite = args.site +config.Data.outLFNDirBase = "/store/user/{}/{}".format(getUsernameFromSiteDB(), args.output) + +if len(args.blacklist) != 0: + config.Site.blacklist = re.split(',', args.blacklist) +if len(args.whitelist) != 0: + config.Site.whitelist = re.split(',', args.whitelist) + +job_names = Set(filter(lambda s: len(s) != 0, re.split(",", args.jobNames))) + +from TauTriggerTools.Common.crabTools import JobCollection +try: + job_collection = JobCollection(args.jobFile, job_names, args.lumiMask, args.jobNameSuffix) + print args.jobFile + print job_collection + print "Splitting: {} with {} units per job".format(args.splitting, args.unitsPerJob) + job_collection.submit(config, args.splitting, args.unitsPerJob) +except RuntimeError as err: + print >> sys.stderr, "ERROR:", str(err) + sys.exit(1) diff --git 
a/Common/src/GenTruthTools.cc b/Common/src/GenTruthTools.cc index f9e1cc31f47..14f32cb2f09 100644 --- a/Common/src/GenTruthTools.cc +++ b/Common/src/GenTruthTools.cc @@ -7,52 +7,81 @@ This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ namespace analysis { namespace gen_truth { -void FindFinalStateDaughters(const reco::GenParticle& particle, std::set& daughters, - const std::set& pdg_to_exclude) +FinalState::FinalState(const reco::GenParticle& particle, const std::set& pdg_to_exclude, + const std::set& particles_to_exclude) { - if(!particle.daughterRefVector().size()) { - const int abs_pdg = std::abs(particle.pdgId()); - if(!pdg_to_exclude.count(abs_pdg)) - daughters.insert(&particle); - } else { - for(const auto& daughter : particle.daughterRefVector()) - FindFinalStateDaughters(*daughter, daughters, pdg_to_exclude); - } + findFinalStateParticles(particle, pdg_to_exclude, particles_to_exclude); } -LorentzVectorXYZ GetFinalStateMomentum(const reco::GenParticle& particle, std::vector& visible_daughters, - bool excludeInvisible, bool excludeLightLeptons) +void FinalState::findFinalStateParticles(const reco::GenParticle& particle, const std::set& pdg_to_exclude, + const std::set& particles_to_exclude) { - using set = std::set; - using pair = std::pair; - static const set empty = {}; - static const set light_leptons = { 11, 13 }; - static const set invisible_particles = { 12, 14, 16 }; - static const set light_and_invisible = { 11, 12, 13, 14, 16 }; - - static const std::map to_exclude { - { pair(false, false), &empty }, { pair(true, false), &invisible_particles }, - { pair(false, true), &light_leptons }, { pair(true, true), &light_and_invisible }, - }; + if(!particles_to_exclude.count(&particle)) { + if(particle.daughterRefVector().empty()) { + const int abs_pdg = std::abs(particle.pdgId()); + if(!pdg_to_exclude.count(abs_pdg)) + addParticle(particle); + } else { + for(const auto& daughter : particle.daughterRefVector()) + findFinalStateParticles(*daughter, pdg_to_exclude, particles_to_exclude); + } + } +} - std::set daughters_set; - FindFinalStateDaughters(particle, daughters_set, *to_exclude.at(pair(excludeInvisible, false))); - visible_daughters.clear(); - visible_daughters.insert(visible_daughters.begin(), daughters_set.begin(), daughters_set.end()); +void FinalState::addParticle(const reco::GenParticle& particle) +{ + static constexpr int gamma = 22; + static const std::set light_leptons = { 11, 13 }, neutrinos = { 12, 14, 16 }; + const int abs_pdg = std::abs(particle.pdgId()); + std::set types; + if(neutrinos.count(abs_pdg)) { + types.insert(ParticleType::neutrino); + } else { + types.insert(ParticleType::visible); + if(light_leptons.count(abs_pdg)) { + types.insert(ParticleType::light_lepton); + } else if(abs_pdg == gamma) { + types.insert(ParticleType::gamma); + } else { + if(particle.charge() == 0) + types.insert(ParticleType::neutral_hadron); + else + types.insert(ParticleType::charged_hadron); + } + } + for(ParticleType type : types) { + if(!particles[type].count(&particle)) { + particles[type].insert(&particle); + momentum[type] += particle.p4(); + } + } +} - LorentzVectorXYZ p4; - for(auto daughter : visible_daughters) { - if(excludeLightLeptons && light_leptons.count(std::abs(daughter->pdgId())) - && daughter->statusFlags().isDirectTauDecayProduct()) continue; - p4 += daughter->p4(); +const reco::GenParticle* FindTerminalCopy(const reco::GenParticle& genParticle, bool first) +{ + const reco::GenParticle* particle = &genParticle; + while((first && 
!particle->statusFlags().isFirstCopy()) || (!first && !particle->statusFlags().isLastCopy())) { + bool nextCopyFound = false; + const auto& refVector = first ? particle->motherRefVector() : particle->daughterRefVector(); + for(const auto& p : refVector) { + if(p->pdgId() == particle->pdgId()) { + particle = &(*p); + nextCopyFound = true; + break; + } + } + if(!nextCopyFound) { + const std::string pos = first ? "first" : "last"; + throw analysis::exception("Unable to find the %1% copy") % pos; + } } - return p4; + return particle; } -LeptonMatchResult LeptonGenMatch(const LorentzVectorM& p4, const reco::GenParticleCollection& genParticles) +bool FindLeptonGenMatch(const reco::GenParticle& particle, LeptonMatchResult& result, + const LorentzVectorM* ref_p4, double* best_match_dr2) { static constexpr int electronPdgId = 11, muonPdgId = 13, tauPdgId = 15; - static double dR2_threshold = std::pow(0.2, 2); static const std::map pt_thresholds = { { electronPdgId, 8 }, { muonPdgId, 8 }, { tauPdgId, 15 } @@ -60,48 +89,92 @@ LeptonMatchResult LeptonGenMatch(const LorentzVectorM& p4, const reco::GenPartic using pair = std::pair; static const std::map genMatches = { - { { electronPdgId, false }, GenLeptonMatch::Electron }, { { electronPdgId, true }, GenLeptonMatch::TauElectron }, + { { electronPdgId, false }, GenLeptonMatch::Electron }, + { { electronPdgId, true }, GenLeptonMatch::TauElectron }, { { muonPdgId, false }, GenLeptonMatch::Muon }, { { muonPdgId, true }, GenLeptonMatch::TauMuon }, { { tauPdgId, false }, GenLeptonMatch::Tau }, { { tauPdgId, true }, GenLeptonMatch::Tau } }; - LeptonMatchResult result; - double match_dr2 = dR2_threshold; + const bool isTauProduct = particle.statusFlags().isDirectPromptTauDecayProduct(); + if(!((particle.statusFlags().isPrompt() || isTauProduct) && particle.statusFlags().isFirstCopy())) return false; + const int abs_pdg = std::abs(particle.pdgId()); + if(!pt_thresholds.count(abs_pdg)) return false; + + const reco::GenParticle* particle_lastCopy = FindTerminalCopy(particle, false); + FinalState finalState(*particle_lastCopy), finalState_rad(particle, {}, {particle_lastCopy}); + const auto& vis_p4 = finalState.getMomentum(FinalState::ParticleType::visible); + const auto& vis_rad_p4 = finalState_rad.getMomentum(FinalState::ParticleType::visible); + const auto total_vis_p4 = vis_p4 + vis_rad_p4; + + GenLeptonMatch match; + if(abs_pdg == tauPdgId && finalState.count(FinalState::ParticleType::light_lepton)) { + auto light_lepton = *finalState.getParticles(FinalState::ParticleType::light_lepton).begin(); + const int abs_lep_pdg = std::abs(light_lepton->pdgId()); + const double pt_thr = pt_thresholds.at(abs_lep_pdg); + if(light_lepton->pt() > pt_thr || total_vis_p4.pt() < pt_thr) return false; + match = genMatches.at(pair(abs_lep_pdg, true)); + } else { + if(total_vis_p4.pt() <= pt_thresholds.at(abs_pdg)) return false; + match = genMatches.at(pair(abs_pdg, isTauProduct)); + } + if(ref_p4 != nullptr && best_match_dr2 != nullptr) { + const double dr2_vis = ROOT::Math::VectorUtil::DeltaR2(*ref_p4, vis_p4); + const double dr2_tot_vis = ROOT::Math::VectorUtil::DeltaR2(*ref_p4, total_vis_p4); + const double dr2 = std::min(dr2_vis, dr2_tot_vis); + if(dr2 >= *best_match_dr2) return false; + *best_match_dr2 = dr2; + } + + result.match = match; + result.gen_particle_firstCopy = &particle; + result.gen_particle_lastCopy = particle_lastCopy; + result.visible_daughters = finalState.getParticles(FinalState::ParticleType::visible); + result.visible_rad = 
finalState_rad.getParticles(FinalState::ParticleType::visible); + result.visible_p4 = finalState.getMomentum(FinalState::ParticleType::visible); + result.visible_rad_p4 = finalState_rad.getMomentum(FinalState::ParticleType::visible); + result.n_charged_hadrons = finalState.count(FinalState::ParticleType::charged_hadron); + result.n_neutral_hadrons = finalState.count(FinalState::ParticleType::neutral_hadron); + result.n_gammas = finalState.count(FinalState::ParticleType::gamma); + result.n_gammas_rad = finalState_rad.count(FinalState::ParticleType::gamma); + + return true; +} + +std::vector CollectGenLeptons(const reco::GenParticleCollection& genParticles) +{ + std::vector leptons; for(const reco::GenParticle& particle : genParticles) { - const bool isTauProduct = particle.statusFlags().isDirectPromptTauDecayProduct(); - if((!particle.statusFlags().isPrompt() && !isTauProduct) /*|| !particle.statusFlags().isLastCopy()*/) continue; - - const int abs_pdg = std::abs(particle.pdgId()); - if(!pt_thresholds.count(abs_pdg)) continue; - - std::vector visible_daughters; - const auto particle_p4 = abs_pdg == tauPdgId ? GetFinalStateMomentum(particle, visible_daughters, true, true) - : particle.p4(); - - const double dr2 = ROOT::Math::VectorUtil::DeltaR2(p4, particle_p4); - if(dr2 >= match_dr2) continue; - if(particle_p4.pt() <= pt_thresholds.at(abs_pdg)) continue; - - match_dr2 = dr2; - result.match = genMatches.at(pair(abs_pdg, isTauProduct)); - result.gen_particle = &particle; - result.visible_daughters = visible_daughters; - result.visible_daughters_p4 = particle_p4; - - int n_chargedParticles = 0; - int n_neutralParticles = 0; - for(unsigned n = 0; n < visible_daughters.size(); ++n){ - const reco::GenParticle* gen_visible_particle = visible_daughters.at(n); - if(gen_visible_particle->charge() == 0) - ++n_neutralParticles; - else - ++n_chargedParticles; - } + LeptonMatchResult result; + if(FindLeptonGenMatch(particle, result)) + leptons.push_back(result); + } + return leptons; +} - result.n_chargedParticles = n_chargedParticles; - result.n_neutralParticles = n_neutralParticles; +LeptonMatchResult LeptonGenMatch(const LorentzVectorM& p4, const reco::GenParticleCollection& genParticles) +{ + static const double dR2_threshold = std::pow(0.2, 2); + LeptonMatchResult result; + double best_match_dr2 = dR2_threshold; + for(const reco::GenParticle& particle : genParticles) + FindLeptonGenMatch(particle, result, &p4, &best_match_dr2); + return result; +} +LeptonMatchResult LeptonGenMatch(const LorentzVectorM& p4, const std::vector& genLeptons) +{ + static const double dR2_threshold = std::pow(0.2, 2); + LeptonMatchResult result; + double best_match_dr2 = dR2_threshold; + for(const LeptonMatchResult& lepton : genLeptons) { + const auto total_vis_p4 = lepton.visible_p4 + lepton.visible_rad_p4; + const double dr2_vis = ROOT::Math::VectorUtil::DeltaR2(p4, lepton.visible_p4); + const double dr2_tot_vis = ROOT::Math::VectorUtil::DeltaR2(p4, total_vis_p4); + const double dr2 = std::min(dr2_vis, dr2_tot_vis); + if(dr2 >= best_match_dr2) continue; + best_match_dr2 = dr2; + result = lepton; } return result; } diff --git a/Common/src/PatHelpers.cc b/Common/src/PatHelpers.cc new file mode 100644 index 00000000000..a6f1a6cdbe3 --- /dev/null +++ b/Common/src/PatHelpers.cc @@ -0,0 +1,118 @@ +/*! Various utility functions. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/
+
+#include "TauTriggerTools/Common/interface/PatHelpers.h"
+#include "DataFormats/PatCandidates/interface/PackedCandidate.h"
+#include "TauTriggerTools/Common/interface/AnalysisTypes.h"
+
+namespace tau_trigger {
+
+double MuonIsolation(const pat::Muon& muon)
+{
+    const double pfIso = muon.pfIsolationR04().sumChargedHadronPt
+                         + std::max(0.0, muon.pfIsolationR04().sumNeutralHadronEt
+                                         + muon.pfIsolationR04().sumPhotonEt - 0.5 * muon.pfIsolationR04().sumPUPt);
+    return pfIso / muon.polarP4().pt();
+}
+
+std::vector<TauEntry> CollectTaus(const LorentzVectorM& muon_p4, const pat::TauCollection& taus,
+                                  const std::vector<gen_truth::LeptonMatchResult>& genLeptons, double deltaR2Thr)
+{
+    static const std::string mvaIdName = "byIsolationMVArun2017v2DBoldDMwLTraw2017";
+    static const std::string deepIdName = "byDeepTau2017v2p1VSjetraw";
+    std::map<TauSelection, const pat::Tau*> best_tau;
+    for(const auto& tau : taus) {
+        auto leadChargedHadrCand = dynamic_cast<const pat::PackedCandidate*>(tau.leadChargedHadrCand().get());
+        if(tau.polarP4().pt() > 18 && std::abs(tau.polarP4().eta()) < 2.3
+                && leadChargedHadrCand && std::abs(leadChargedHadrCand->dz()) < 0.2
+                && reco::deltaR2(muon_p4, tau.polarP4()) > deltaR2Thr) {
+            const bool pass_mva_sel = tau.tauID("againstMuonLoose3") > 0.5f;
+            const bool pass_deep_sel = tau.isTauIDAvailable("byDeepTau2017v2p1VSjetraw")
+                && tau.tauID("byVVVLooseDeepTau2017v2p1VSe") > 0.5f
+                && tau.tauID("byVLooseDeepTau2017v2p1VSmu") > 0.5f;
+            if((pass_mva_sel || pass_deep_sel) && (!best_tau.count(TauSelection::pt)
+                    || best_tau.at(TauSelection::pt)->polarP4().pt() < tau.polarP4().pt()))
+                best_tau[TauSelection::pt] = &tau;
+            if(pass_mva_sel && (!best_tau.count(TauSelection::MVA)
+                    || best_tau.at(TauSelection::MVA)->tauID(mvaIdName) < tau.tauID(mvaIdName)))
+                best_tau[TauSelection::MVA] = &tau;
+            if(pass_deep_sel && (!best_tau.count(TauSelection::DeepTau)
+                    || best_tau.at(TauSelection::DeepTau)->tauID(deepIdName) < tau.tauID(deepIdName)))
+                best_tau[TauSelection::DeepTau] = &tau;
+        }
+    }
+    std::map<const pat::Tau*, TauEntry> selected_taus;
+    const gen_truth::LeptonMatchResult selected_gen_tau = SelectGenLeg(genLeptons, true);
+    const bool has_selected_gen_tau = selected_gen_tau.match != GenLeptonMatch::NoMatch;
+    bool selected_gen_tau_stored = false;
+    for(const auto& entry : best_tau) {
+        const pat::Tau* reco_tau = entry.second;
+        if(!selected_taus.count(reco_tau)) {
+            const auto gen_tau = gen_truth::LeptonGenMatch(reco_tau->polarP4(), genLeptons);
+            const bool has_gen_tau = gen_tau.match != GenLeptonMatch::NoMatch;
+            selected_taus[reco_tau] = TauEntry{reco_tau, gen_tau, 0};
+            if(has_selected_gen_tau && has_gen_tau
+                    && selected_gen_tau.gen_particle_firstCopy == gen_tau.gen_particle_firstCopy) {
+                selected_gen_tau_stored = true;
+                selected_taus[reco_tau].selection |= static_cast<unsigned>(TauSelection::gen);
+            }
+        }
+        selected_taus[reco_tau].selection |= static_cast<unsigned>(entry.first);
+    }
+    if(has_selected_gen_tau && !selected_gen_tau_stored) {
+        const pat::Tau* reco_tau = nullptr;
+        for(const auto& tau : taus) {
+            const auto gen_tau = gen_truth::LeptonGenMatch(tau.polarP4(), genLeptons);
+            if(gen_tau.match != GenLeptonMatch::NoMatch
+                    && gen_tau.gen_particle_firstCopy == selected_gen_tau.gen_particle_firstCopy) {
+                reco_tau = &tau;
+                break;
+            }
+        }
+        if(selected_taus.count(reco_tau))
+            throw exception("Inconsistency in CollectTaus algorithm.");
+        selected_taus[reco_tau] = TauEntry{reco_tau, selected_gen_tau, static_cast<unsigned>(TauSelection::gen)};
+    }
+
+    std::vector<TauEntry> result;
+    for(const auto& entry : selected_taus)
+        result.push_back(entry.second);
+    return result;
+}
+
+bool PassBtagVeto(const LorentzVectorM& muon_p4, const
LorentzVectorM& tau_p4, + const pat::JetCollection& jets, double btagThreshold, double deltaR2Thr) +{ + if(btagThreshold > 0) { + for(const pat::Jet& jet : jets) { + const auto btag = jet.bDiscriminator("pfDeepFlavourJetTags:probb") + + jet.bDiscriminator("pfDeepFlavourJetTags:probbb") + + jet.bDiscriminator("pfDeepFlavourJetTags:problepb"); + if(reco::deltaR2(muon_p4, jet.polarP4()) > deltaR2Thr + && reco::deltaR2(tau_p4, jet.polarP4()) > deltaR2Thr + && jet.polarP4().pt() > 20 && std::abs(jet.polarP4().eta()) < 2.4 + && btag > btagThreshold) + return false; + } + } + return true; +} + +gen_truth::LeptonMatchResult SelectGenLeg(const std::vector& genLeptons, bool is_tau) +{ + static const std::map> all_matches = { + { true, { GenLeptonMatch::Tau } }, + { false, { GenLeptonMatch::Muon, GenLeptonMatch::TauMuon } }, + }; + const auto& matches = all_matches.at(is_tau); + gen_truth::LeptonMatchResult leg; + for(const auto& lepton : genLeptons) { + if(matches.count(lepton.match) && (leg.match == GenLeptonMatch::NoMatch + || leg.visible_p4.pt() < lepton.visible_p4.pt())) { + leg = lepton; + } + } + return leg; +} + +} // namespace tau_trigger diff --git a/Common/src/RootExt.cpp b/Common/src/RootExt.cpp new file mode 100644 index 00000000000..26e141bbc05 --- /dev/null +++ b/Common/src/RootExt.cpp @@ -0,0 +1,163 @@ +/*! Common CERN ROOT extensions. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#include "TauTriggerTools/Common/interface/RootExt.h" + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "TauTriggerTools/Common/interface/exception.h" + +namespace root_ext { + +std::shared_ptr CreateRootFile(const std::string& file_name, ROOT::ECompressionAlgorithm compression, + int compression_level) +{ + std::shared_ptr file(TFile::Open(file_name.c_str(), "RECREATE", "", compression * 100 + compression_level)); + if(!file || file->IsZombie()) + throw analysis::exception("File '%1%' not created.") % file_name; + return file; +} + +std::shared_ptr OpenRootFile(const std::string& file_name) +{ + std::shared_ptr file(TFile::Open(file_name.c_str(), "READ")); + if(!file || file->IsZombie()) + throw analysis::exception("File '%1%' not opened.") % file_name; + return file; +} + +void WriteObject(const TObject& object, TDirectory* dir, const std::string& name) +{ + if(!dir) + throw analysis::exception("Can't write object to nullptr."); + const std::string name_to_write = name.size() ? 
name : object.GetName(); + dir->WriteTObject(&object, name_to_write.c_str(), "Overwrite"); +} + + +TDirectory* GetDirectory(TDirectory& root_dir, const std::string& name, bool create_if_needed) +{ + if(!name.size() || (name.size() == 1 && name.at(0) == '/')) + return &root_dir; + TDirectory* dir = root_dir.GetDirectory(name.c_str()); + if(!dir && create_if_needed) { + const size_t pos = name.find("/"); + if(pos == std::string::npos || pos == name.size() - 1) { + root_dir.mkdir(name.c_str()); + dir = root_dir.GetDirectory(name.c_str()); + } else { + const std::string first_dir_name = name.substr(0, pos), sub_dirs_path = name.substr(pos + 1); + TDirectory* first_dir = GetDirectory(root_dir, first_dir_name, true); + dir = GetDirectory(*first_dir, sub_dirs_path, true); + } + } + + if(!dir) + throw analysis::exception("Unable to get directory '%1%' from the root directory '%2%'.") + % name % root_dir.GetName(); + return dir; +} + +ClassInheritance FindClassInheritance(const std::string& class_name) +{ + static std::map classes; + auto iter = classes.find(class_name); + if(iter != classes.end()) + return iter->second; + TClass *cl = gROOT->GetClass(class_name.c_str()); + if(!cl) + throw analysis::exception("Unable to get TClass for class named '%1%'.") % class_name; + + ClassInheritance inheritance; + if(cl->InheritsFrom("TH1")) + inheritance = ClassInheritance::TH1; + else if(cl->InheritsFrom("TTree")) + inheritance = ClassInheritance::TTree; + else if(cl->InheritsFrom("TDirectory")) + inheritance = ClassInheritance::TDirectory; + else + throw analysis::exception("Unknown class inheritance for class named '%1%'.") % class_name; + classes[class_name] = inheritance; + return inheritance; +} +} // namespace root_ext + + +std::ostream& operator<<(std::ostream& s, const TVector3& v) +{ + s << "(" << v.x() << ", " << v.y() << ", " << v.z() << ")"; + return s; +} + +std::ostream& operator<<(std::ostream& s, const TLorentzVector& v) +{ + s << "(pt=" << v.Pt() << ", eta=" << v.Eta() << ", phi=" << v.Phi() << ", E=" << v.E() << ", m=" << v.M() << ")"; + return s; +} + +// Based on TMatrixD::Print code. 
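+// The matrix is printed in "sheets" of at most 5 columns (10 if the number format is narrow enough),
+// mirroring the layout produced by TMatrixD::Print.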
+std::ostream& operator<<(std::ostream& s, const TMatrixD& matrix) +{ + if (!matrix.IsValid()) { + s << "Matrix is invalid"; + return s; + } + + //build format + static const char *format = "%11.4g "; + char topbar[100]; + snprintf(topbar,100, format, 123.456789); + size_t nch = strlen(topbar) + 1; + if (nch > 18) nch = 18; + char ftopbar[20]; + for(size_t i = 0; i < nch; i++) ftopbar[i] = ' '; + size_t nk = 1 + size_t(std::log10(matrix.GetNcols())); + snprintf(ftopbar+nch/2,20-nch/2,"%s%zud","%",nk); + size_t nch2 = strlen(ftopbar); + for (size_t i = nch2; i < nch; i++) ftopbar[i] = ' '; + ftopbar[nch] = '|'; + ftopbar[nch+1] = 0; + + s << matrix.GetNrows() << "x" << matrix.GetNcols() << " matrix"; + + size_t cols_per_sheet = 5; + if (nch <= 8) cols_per_sheet =10; + const size_t ncols = static_cast(matrix.GetNcols()); + const size_t nrows = static_cast(matrix.GetNrows()); + const size_t collwb = static_cast(matrix.GetColLwb()); + const size_t rowlwb = static_cast(matrix.GetRowLwb()); + nk = 5+nch*std::min(cols_per_sheet, static_cast(matrix.GetNcols())); + for (size_t i = 0; i < nk; i++) + topbar[i] = '-'; + topbar[nk] = 0; + for (size_t sheet_counter = 1; sheet_counter <= ncols; sheet_counter += cols_per_sheet) { + s << "\n |"; + for (size_t j = sheet_counter; j < sheet_counter+cols_per_sheet && j <= ncols; j++) { + char ftopbar_out[100]; + snprintf(ftopbar_out, 100, ftopbar, j+collwb-1); + s << ftopbar_out; + } + s << "\n" << topbar << "\n"; + if (matrix.GetNoElements() <= 0) continue; + for (size_t i = 1; i <= nrows; i++) { + char row_out[100]; + snprintf(row_out, 100, "%4zu |",i+rowlwb-1); + s << row_out; + for (size_t j = sheet_counter; j < sheet_counter+cols_per_sheet && j <= ncols; j++) { + snprintf(row_out, 100, format, matrix(static_cast(i+rowlwb-1), + static_cast(j+collwb-1))); + s << row_out; + } + s << "\n"; + } + } + return s; +} diff --git a/Common/src/TextIO.cpp b/Common/src/TextIO.cpp new file mode 100644 index 00000000000..d4a31be8328 --- /dev/null +++ b/Common/src/TextIO.cpp @@ -0,0 +1,141 @@ +/*! Definition of primitives for a text based input/output. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#include "TauTriggerTools/Common/interface/TextIO.h" + +#include +#include +#include +#include +#include +#include "TauTriggerTools/Common/interface/exception.h" + +namespace analysis { + +std::string RemoveFileExtension(const std::string& file_name) +{ + return boost::filesystem::change_extension(file_name, "").string(); +} + +std::string GetFileNameWithoutPath(const std::string& file_name) +{ + const size_t lastindex = file_name.find_last_of("/"); + if(lastindex == std::string::npos) + return file_name; + else + return file_name.substr(lastindex+1); +} + +std::vector SplitValueList(const std::string& _values_str, bool allow_duplicates, + const std::string& separators, bool enable_token_compress) +{ + std::string values_str = _values_str; + std::vector result; + if(enable_token_compress) + boost::trim_if(values_str, boost::is_any_of(separators)); + if(!values_str.size()) return result; + const auto token_compress = enable_token_compress ? 
boost::algorithm::token_compress_on + : boost::algorithm::token_compress_off; + boost::split(result, values_str, boost::is_any_of(separators), token_compress); + if(!allow_duplicates) { + std::unordered_set set_result; + for(const std::string& value : result) { + if(set_result.count(value)) + throw exception("Value '%1%' listed more than once in the value list '%2%'.") % value % values_str; + set_result.insert(value); + } + } + return result; +} + +std::vector ReadValueList(std::istream& stream, size_t number_of_items, bool allow_duplicates, + const std::string& separators, bool enable_token_compress) +{ + const auto stream_exceptions = stream.exceptions(); + stream.exceptions(std::istream::goodbit); + try { + std::vector result; + std::unordered_set set_result; + const auto predicate = boost::is_any_of(separators); + size_t n = 0; + for(; n < number_of_items; ++n) { + std::string value; + while(true) { + const auto c = stream.get(); + if(!stream.good()) { + if(stream.eof()) break; + throw exception("Failed to read values from stream."); + } + if(predicate(c)) { + if(!value.size() && enable_token_compress) continue; + break; + } + value.push_back(static_cast(c)); + } + if(!allow_duplicates && set_result.count(value)) + throw exception("Value '%1%' listed more than once in the input stream.") % value; + result.push_back(value); + set_result.insert(value); + } + if(n != number_of_items) + throw exception("Expected %1% items, while read only %2%.") % number_of_items % n; + + stream.clear(); + stream.exceptions(stream_exceptions); + return result; + } catch(exception&) { + stream.clear(); + stream.exceptions(stream_exceptions); + throw; + } +} + +StVariable::StVariable() : value(0), error_up(0), error_low(0) {} +StVariable::StVariable(double _value, double _error_up, double _error_low) : + value(_value), error_up(_error_up), error_low(_error_low) {} + +int StVariable::precision_up() const +{ + return error_up != 0. + ? static_cast(std::floor(std::log10(error_up)) - number_of_significant_digits_in_error + 1) + : max_precision; +} + +int StVariable::precision_low() const +{ + return error_low != 0. + ? 
static_cast(std::floor(std::log10(error_low)) - number_of_significant_digits_in_error + 1) + : max_precision; +} + +int StVariable::precision() const { return std::max(precision_up(), precision_low()); } + +int StVariable::decimals_to_print_low() const { return std::max(0, -precision_low()); } +int StVariable::decimals_to_print_up() const { return std::max(0, -precision_up()); } +int StVariable::decimals_to_print() const { return std::min(decimals_to_print_low(), decimals_to_print_up()); } + +std::string StVariable::ToLatexString() const +{ + const ValueType ten_pow_p = std::pow(10.0, precision()); + const ValueType value_rounded = std::round(value / ten_pow_p) * ten_pow_p; + const ValueType error_up_rounded = std::ceil(error_up / ten_pow_p) * ten_pow_p; + const ValueType error_low_rounded = std::ceil(error_low / ten_pow_p) * ten_pow_p; + + std::ostringstream ss; + ss << std::setprecision(decimals_to_print()) << std::fixed; + if(error_up == 0 && error_low == 0) + ss << value_rounded<< "^{+0}_{-0}"; + else if(!std::isnan(error_low)) + ss << value_rounded<< "^{+" << error_up_rounded << "}_{-" << error_low_rounded << "}"; + else if(std::isnan(error_low)) { + ss << value_rounded << " \\pm "; + if(error_up == 0) + ss << "0"; + else + ss << error_up_rounded; + } + + return ss.str(); +} + +} // namespace analysis diff --git a/Common/src/TriggerDescriptor.cc b/Common/src/TriggerDescriptor.cc new file mode 100644 index 00000000000..1f244896ca6 --- /dev/null +++ b/Common/src/TriggerDescriptor.cc @@ -0,0 +1,147 @@ +/*! Definition of trigger results. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#include "TauTriggerTools/Common/interface/TriggerDescriptor.h" +#include +#include "TauTriggerTools/Common/interface/TextIO.h" + +namespace tau_trigger { +unsigned GetTriggerObjectTypes(const pat::TriggerObjectStandAlone& triggerObject) +{ + unsigned type = 0; + if(triggerObject.hasTriggerObjectType(trigger::TriggerElectron)) + type |= static_cast(analysis::LegType::e); + if(triggerObject.hasTriggerObjectType(trigger::TriggerMuon)) + type |= static_cast(analysis::LegType::mu); + if(triggerObject.hasTriggerObjectType(trigger::TriggerTau)) + type |= static_cast(analysis::LegType::tau); + if(triggerObject.hasTriggerObjectType(trigger::TriggerJet)) + type |= static_cast(analysis::LegType::jet); + return type; +} + +const l1t::Tau* MatchL1Taus(const analysis::LorentzVectorM& ref_p4, const BXVector& l1Taus, double deltaR2Thr, + int bx_value) +{ + const l1t::Tau* matched_tau = nullptr; + double dR2_bestMatch = deltaR2Thr; + for(auto iter = l1Taus.begin(0); iter != l1Taus.end(0); ++iter) { + const double deltaR2 = ROOT::Math::VectorUtil::DeltaR2(ref_p4, iter->polarP4()); + if(deltaR2 < dR2_bestMatch) { + matched_tau = &(*iter); + dR2_bestMatch = deltaR2; + } + } + return matched_tau; +} + +TriggerDescriptorCollection::TriggerDescriptorCollection(const edm::VParameterSet& trig_vpset) +{ + if(trig_vpset.size() > MaxNumberOfTriggers) + throw analysis::exception("The max number of triggers is exceeded"); + for(const auto& pset : trig_vpset) { + TriggerDescriptor desc; + desc.path = pset.getParameter("path"); + desc.is_tag = pset.getParameter("is_tag"); + const std::vector leg_types = pset.getParameter>("leg_types"); + const std::vector filters = pset.getParameter>("filters"); + if(desc_indices.count(desc.path)) + throw analysis::exception("Duplicated trigger path = '%1%'.") % desc.path; + if(leg_types.size() != filters.size()) + throw analysis::exception("Inconsitent leg_types and filters 
for trigger path = '%1%'.") % desc.path; + + static const std::string regex_format = "^%1%[0-9]+$"; + const std::string regex_str = boost::str(boost::format(regex_format) % desc.path); + desc.regex = boost::regex(regex_str); + desc.type_mask = 0; + for(size_t n = 0; n < leg_types.size(); ++n) { + TriggerLeg leg; + leg.type = analysis::Parse(leg_types.at(n)); + leg.filters = analysis::SplitValueList(filters.at(n), false); + desc.type_mask |= static_cast(leg.type); + desc.legs.push_back(leg); + } + const size_t desc_index = descs.size(); + descs.push_back(desc); + desc_indices[desc.path] = desc_index; + if(desc.is_tag) + tag_desc_indices.insert(desc_index); + } +} + +FullTriggerResults TriggerDescriptorCollection::matchTriggerObjects( + const edm::TriggerResults& triggerResults, const pat::TriggerObjectStandAloneCollection& triggerObjects, + const analysis::LorentzVectorM& ref_p4, const std::vector& triggerNames, double deltaR2Thr, + bool include_tag_paths, bool include_nontag_paths) +{ + FullTriggerResults results; + + std::vector obj_types; + for(const auto& hlt_obj : triggerObjects) { + const unsigned obj_type = GetTriggerObjectTypes(hlt_obj); + obj_types.push_back(obj_type); + } + + for(size_t desc_index = 0; desc_index < descs.size(); ++desc_index) { + const auto& trig_desc = descs.at(desc_index); + if(!include_tag_paths && trig_desc.is_tag) continue; + if(!include_nontag_paths && !trig_desc.is_tag) continue; + if(trig_desc.global_index < 0) continue; + const bool accept = triggerResults.accept(trig_desc.global_index); + results.accept.set(desc_index, accept); + const std::string& path_name = triggerNames.at(trig_desc.global_index); + TriggerObjectMatchResult best_match; + boost::optional best_matched_obj_index; + double dR2_bestMatch = deltaR2Thr; + for(size_t obj_index = 0; obj_index < triggerObjects.size(); ++obj_index) { + const auto& hlt_obj = triggerObjects.at(obj_index); + const double deltaR2 = ROOT::Math::VectorUtil::DeltaR2(ref_p4, hlt_obj.polarP4()); + if(deltaR2 >= deltaR2Thr) continue; + if(!hlt_obj.hasPathName(path_name, true, false)) continue; + if((obj_types.at(obj_index) & trig_desc.type_mask) != 0) { + if(deltaR2 < dR2_bestMatch) { + best_matched_obj_index = obj_index; + dR2_bestMatch = deltaR2; + } + results.acceptAndMatch.set(desc_index); + } + auto& match_result = results.matchResults[obj_index]; + match_result.hltObjIndex = obj_index; + match_result.objType = obj_types.at(obj_index); + match_result.hasPathName.set(desc_index); + match_result.descIndices.insert(desc_index); + match_result.filters = hlt_obj.filterLabels(); + } + + if(best_matched_obj_index) + results.matchResults[*best_matched_obj_index].isBestMatch.set(desc_index); + } + + return results; +} + +void TriggerDescriptorCollection::updateGlobalIndices(const std::vector& triggerNames) +{ + std::map globalToPos, posToGlobal; + for(size_t pos = 0; pos < descs.size(); ++pos) { + descs.at(pos).global_index = -1; + for(size_t global_index = 0; global_index < triggerNames.size(); ++global_index) { + if(boost::regex_match(triggerNames.at(global_index), descs.at(pos).regex)) { + if(globalToPos.count(global_index)) { + throw analysis::exception("Trigger '%1%' matches with two path patterns: '%2%' and '%3%'.") + % triggerNames.at(global_index) % descs.at(globalToPos.at(global_index)).path + % descs.at(pos).path; + } + if(posToGlobal.count(pos)) { + throw analysis::exception("Path pattern '%1%' matches with two triggers: '%2' and '%3%'.") + % descs.at(pos).path % triggerNames.at(posToGlobal.at(pos)) % 
triggerNames.at(global_index); + } + globalToPos[global_index] = pos; + posToGlobal[pos] = global_index; + descs.at(pos).global_index = global_index; + } + } + } +} + +} // namespace tau_trigger diff --git a/Common/src/exception.cpp b/Common/src/exception.cpp new file mode 100644 index 00000000000..76397d81971 --- /dev/null +++ b/Common/src/exception.cpp @@ -0,0 +1,51 @@ +/*! Definition of the base exception class for the analysis namespace. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#include "TauTriggerTools/Common/interface/exception.h" + +namespace analysis { + +exception::exception(const std::string& message) noexcept : msg_valid(false), f_str(message) +{ + try { + f_msg = std::make_unique(f_str); + f_msg->exceptions(boost::io::all_error_bits); + } catch(boost::io::bad_format_string&) { + msg = "bad formatted error message = '" + f_str + "'."; + msg_valid = true; + } +} + +exception::exception(const exception& e) noexcept : msg(e.msg), msg_valid(e.msg_valid), f_str(e.f_str) +{ + if(e.f_msg) + f_msg = std::make_unique(*e.f_msg); +} + +exception::exception(exception&& e) noexcept : + msg(e.msg), msg_valid(e.msg_valid), f_msg(std::move(e.f_msg)), f_str(e.f_str) {} +const char* exception::what() const noexcept { return message().c_str(); } + +const std::string& exception::message() const noexcept +{ + if(!msg_valid) { + try { + msg = boost::str(*f_msg); + } catch(boost::io::too_few_args&) { + msg = "too few arguments are provided to the error message = '" + f_str + "'."; + } catch(std::exception& e) { + process_unexpected_exception(e); + } + msg_valid = true; + } + return msg; +} + +void exception::process_unexpected_exception(const std::exception& e) const +{ + msg = "An exception has been raised while creating an error message. Error message = '" + f_str + + "'. Exception message = '" + e.what() + "'."; + msg_valid = true; +} + +} // namespace analysis diff --git a/README.md b/README.md new file mode 100644 index 00000000000..6fbbf133f51 --- /dev/null +++ b/README.md @@ -0,0 +1,88 @@ +##--- STEPS TO COMPUTE TRIGGER SCALE FACTORS ------## + +(1) Setup the area following this recipe: + +cmsrel CMSSW_10_2_20 + +cd CMSSW_10_2_20/src + +cmsenv + +git cms-init + +git cms-addpkg RecoMET/METFilters + +git cms-merge-topic cms-egamma:EgammaPostRecoTools + +git clone https://github.com/kandrosov/TauTriggerTools $CMSSW_BASE/src/TauTriggerTools + +cd $CMSSW_BASE/src/TauTriggerTools + +git branch -a + +git checkout remotes/origin/new-tuple + +cd $CMSSW_BASE/src + +scram b -j 10 + +(2) Submit CRAB jobs (after setting up CRAB credentials) for all the samples and all the eras via. 
the following commands (change the --site option to the site where you have write access):
+
+ Common/scripts/crab_submit.py --workArea work-area-2016 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2016 TauTagAndProbe/data/2016/crab/TT.txt
+ Common/scripts/crab_submit.py --workArea work-area-2017 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2017 TauTagAndProbe/data/2017/crab/TT.txt
+ Common/scripts/crab_submit.py --workArea work-area-2018 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2018 TauTagAndProbe/data/2018/crab/TT.txt
+ Common/scripts/crab_submit.py --workArea work-area-2016 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2016 TauTagAndProbe/data/2016/crab/W.txt
+ Common/scripts/crab_submit.py --workArea work-area-2017 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2017 TauTagAndProbe/data/2017/crab/W.txt
+ Common/scripts/crab_submit.py --workArea work-area-2018 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2018 TauTagAndProbe/data/2018/crab/W.txt
+ Common/scripts/crab_submit.py --workArea work-area-2016 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2016 TauTagAndProbe/data/2016/crab/DY.txt
+ Common/scripts/crab_submit.py --workArea work-area-2017 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2017 TauTagAndProbe/data/2017/crab/DY.txt
+ Common/scripts/crab_submit.py --workArea work-area-2018 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2018 TauTagAndProbe/data/2018/crab/DY.txt
+ Common/scripts/crab_submit.py --workArea work-area-2016 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2016 TauTagAndProbe/data/2016/crab/Data_SingleMuon.txt
+ Common/scripts/crab_submit.py --workArea work-area-2017 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2017 TauTagAndProbe/data/2017/crab/Data_SingleMuon.txt
+ Common/scripts/crab_submit.py --workArea work-area-2018 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2018 TauTagAndProbe/data/2018/crab/Data_SingleMuon_ABC.txt
+ Common/scripts/crab_submit.py --workArea work-area-2018 --cfg TauTagAndProbe/test/produceTuples.py --site T2_EE_Estonia --output tau_hlt_prod_2018 TauTagAndProbe/data/2018/crab/Data_SingleMuon_D.txt
+
+
+(3) After job completion, merge the CRAB output files (for the respective samples and eras) into single ROOT files using hadd and move them into a dedicated folder in your personal mass storage area (called "/hdfs/local/ram/run2_ntuples" below):
+
+
+ mkdir -p /hdfs/local/ram/run2_ntuples
+ mkdir -p /hdfs/local/ram/run2_ntuples/full
+ mkdir -p /hdfs/local/ram/run2_ntuples/full/2016
+ mkdir -p /hdfs/local/ram/run2_ntuples/full/2017
+ mkdir -p /hdfs/local/ram/run2_ntuples/full/2018
+
+ ##------ STEPS FOR HADD FOR THE TTBAR SAMPLES (OTHER SAMPLES WILL BE SIMILAR) -----##
+ hadd -f TTToSemiLeptonic.root /hdfs/cms/store/user/rdewanje/tau_hlt_prod_2016/TTToSemiLeptonic*/*/*/000*/*.root
+ mv TTToSemiLeptonic.root /hdfs/local/ram/run2_ntuples/full/2016/
+ hadd -f TTTo2L2Nu.root /hdfs/cms/store/user/rdewanje/tau_hlt_prod_2016/TTTo2L2Nu*/*/*/000*/*.root
+ mv TTTo2L2Nu.root /hdfs/local/ram/run2_ntuples/full/2016/
+
+ hadd -f TTToSemiLeptonic.root /hdfs/cms/store/user/rdewanje/tau_hlt_prod_2017/TTToSemiLeptonic*/*/*/000*/*.root
+ mv TTToSemiLeptonic.root /hdfs/local/ram/run2_ntuples/full/2017/
+ hadd -f TTTo2L2Nu.root /hdfs/cms/store/user/rdewanje/tau_hlt_prod_2017/TTTo2L2Nu*/*/*/000*/*.root
+ mv TTTo2L2Nu.root /hdfs/local/ram/run2_ntuples/full/2017/
+
+ hadd -f TTToSemiLeptonic.root /hdfs/cms/store/user/rdewanje/tau_hlt_prod_2018/TTToSemiLeptonic*/*/*/000*/*.root
+ mv TTToSemiLeptonic.root /hdfs/local/ram/run2_ntuples/full/2018/
+ hadd -f TTTo2L2Nu.root /hdfs/cms/store/user/rdewanje/tau_hlt_prod_2018/TTTo2L2Nu*/*/*/000*/*.root
+ mv TTTo2L2Nu.root /hdfs/local/ram/run2_ntuples/full/2018/
+ .
+ .
+ .
+
+ ##-------ALSO COPY THE PILEUP INFO FOR THE RESPECTIVE ERAS TO THIS DIRECTORY ------##
+ cp TauTagAndProbe/data/2016/Pileup_Data2016.root /hdfs/local/ram/run2_ntuples
+ cp TauTagAndProbe/data/2017/Pileup_Data2017.root /hdfs/local/ram/run2_ntuples
+ cp TauTagAndProbe/data/2018/Pileup_Data2018.root /hdfs/local/ram/run2_ntuples
+
+
+(4) Once all the merged Run-2 ntuples are in their respective (era-wise) sub-directories of the "run2_ntuples" folder, run the following for the skimming, background estimation, creation/fitting of the turn-ons and the tau trigger SF computation:
+
+ python processNtuples_stage2.py
+
+(5a) The new trigger scale factors (computed separately for true and fake taus) will be available in the files named "NewTriggerSFs_$ERA.root" (where $ERA = 2016/2017/2018).
+
+(5b) The old trigger scale factors (computed in two different ways: adding all fake taus to the DY MC process, or subtracting all fake taus from the data) will be available in the files named "turn_on_$ERA_add-to-dy-mc_fitted_LATEST.root" and "turn_on_$ERA_subtract-from-data_fitted_LATEST.root" (where $ERA = 2016/2017/2018).
+
+(6) The plots for both the old and new trigger scale factors will be located inside the directory "Tau_Trigger_sf_plots" in .pdf, .root and .png formats for all tau ID WPs, channels and tau decay modes.
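+
+Note: at any point after step (2), the status of the submitted CRAB tasks can be checked with the crab_cmd.py helper shipped in this repository; the work-area name below is only an example and should match the one chosen in step (2):
+
+ Common/scripts/crab_cmd.py --workArea work-area-2016 --cmd status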
diff --git a/TauTagAndProbe/BuildFile.xml b/TauTagAndProbe/BuildFile.xml deleted file mode 100644 index 183a70ed942..00000000000 --- a/TauTagAndProbe/BuildFile.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - diff --git a/TauTagAndProbe/README.md b/TauTagAndProbe/README.md index a653a457d56..27a70375994 100644 --- a/TauTagAndProbe/README.md +++ b/TauTagAndProbe/README.md @@ -1,70 +1,46 @@ # TauTagAndProbe Set of tools to evaluate tau trigger performance on T&P -### Install instructions +### How to install + ``` -cmsrel CMSSW_10_1_2_patch2 -cd CMSSW_10_1_2_patch2/src +cmsrel CMSSW_10_2_16 +cd CMSSW_10_2_16/src cmsenv -git clone https://github.com/tstreble/TauTagAndProbe -cd TauTagAndProbe -git checkout master_HLT -cd - +git cms-init +git cms-addpkg RecoMET/METFilters +git cms-merge-topic cms-egamma:EgammaPostRecoTools +# if you want to run DeepTau +git cms-merge-topic -u cms-tau-pog:CMSSW_10_2_X_tau-pog_DeepTau2017v2p1_nanoAOD + +git clone -o cms-tau-pog git@github.com:cms-tau-pog/TauTriggerTools.git scram b -j4 ``` +### How to run -### Producing TagAndProbe ntuples - -Set flag isMC and isMINIAOD according to sample in test/test.py - -HLT path used specified in python/MCAnalysis_cff.py (MC) or python/tagAndProbe_cff.py (data) - -Launch test_2018.py - -To apply standard Z->mu+tauh TagAndProbe selections, mass cuts can be applied at production level by setting useMassCuts = cms.bool(True) in the TauTagAndProbeFilter module or reproducing those selections in the ntuple with mT<30 && 40Scan("triggerNames","","colsize=100") -******************************************************************************************************************* -* Row * triggerNames * -******************************************************************************************************************* -* 0 * HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v * -* 1 * HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_TightID_SingleL1_v * -* 2 * HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_SingleL1_v * -* 3 * HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_TightID_SingleL1_v * -* 4 * HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_SingleL1_v * -* 5 * HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_TightID_SingleL1_v * -... +cmsRun TauTriggerTools/TauTagAndProbe/test/produceTuples.py inputFiles=DY_2018.root fileNamePrefix=file: period=Run2018 isMC=True runDeepTau=True maxEvents=1000 ``` -The Row of the path correspond to the bit number in the tauTriggerBits variable. - -In the example presented here, the decision of the MediumChargedIsoPFTau20 leg can be checked for instance by requiring (tauTriggerBits>>2)&1 (matching with tag muon + offline tau of 0.5 included). +To list all the available options run: +``` +python TauTriggerTools/TauTagAndProbe/test/produceTuples.py help +``` -### Plotting: mostly turn-ons -Any basic check can be performed using those Ntuples (efficiency vs pT, eta-phi...) using custom code developed by the user. - -A more fancy package is available to produce turn-on plots with CB fits. - -For this, the Ntuples must first be converted using the script test/convertTreeForFitting.py (blame RooFit for not being able to deal with custom boolean cuts). - -The package for plotting is available in test/fitter/ (to be compiled with make). - -The CB fit can be run using a as an example test/fitter/hlt_turnOn_fitter.par (includes example for L1 and HLT turnons w/ subtraction of SS mu+tauh events to take into account contamination from fake taus using bkgSubW weight). 
+### How to submit jobs on CRAB -To be launched with +Submitting task: ``` -./fit.exe run/hlt_turnOn_fitter.par +crab_submit.py --workArea work-area --cfg TauTriggerTools/TauTagAndProbe/test/produceTuples.py --site T2_IT_Pisa --output trigger_tuples TauTagAndProbe/data/2018/DY.txt ``` -The "Michelangelo" turn-on plot can then be produced adapting the script test/fitter/results/plot_turnOn_Data_vs_MC.py - +To list all the available options run: +``` +crab_submit.py --help +``` +Checking status of the submitted tasks: +``` +crab_cmd.py --workArea work-area --cmd status +``` diff --git a/TauTagAndProbe/data/2016/Pileup_Data2016.root b/TauTagAndProbe/data/2016/Pileup_Data2016.root new file mode 100644 index 00000000000..5c346d30e2e Binary files /dev/null and b/TauTagAndProbe/data/2016/Pileup_Data2016.root differ diff --git a/TauTagAndProbe/data/2016/crab/DY.txt b/TauTagAndProbe/data/2016/crab/DY.txt new file mode 100644 index 00000000000..e3b33a9cb51 --- /dev/null +++ b/TauTagAndProbe/data/2016/crab/DY.txt @@ -0,0 +1,5 @@ +period=Run2016 isMC=True runDeepTau=True + +#DYJetsToLL_M-50-amcatnloFXFX_ext2 /DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3_ext2-v1/MINIAODSIM +DYJetsToLL_M-50_ext1 /DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3_ext1-v2/MINIAODSIM +DYJetsToLL_M-50_ext2 /DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3_ext2-v2/MINIAODSIM diff --git a/TauTagAndProbe/data/2016/crab/Data_SingleMuon.txt b/TauTagAndProbe/data/2016/crab/Data_SingleMuon.txt new file mode 100644 index 00000000000..1f15eb638db --- /dev/null +++ b/TauTagAndProbe/data/2016/crab/Data_SingleMuon.txt @@ -0,0 +1,11 @@ +lumiMask=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/ReReco/Final/Cert_271036-284044_13TeV_23Sep2016ReReco_Collisions16_JSON.txt +period=Run2016 isMC=False runDeepTau=True + +#SingleMuon_Run2016B_ver1 /SingleMuon/Run2016B-17Jul2018_ver1-v1/MINIAOD +SingleMuon_Run2016B_ver2 /SingleMuon/Run2016B-17Jul2018_ver2-v1/MINIAOD +SingleMuon_Run2016C /SingleMuon/Run2016C-17Jul2018-v1/MINIAOD +SingleMuon_Run2016D /SingleMuon/Run2016D-17Jul2018-v1/MINIAOD +SingleMuon_Run2016E /SingleMuon/Run2016E-17Jul2018-v1/MINIAOD +SingleMuon_Run2016F /SingleMuon/Run2016F-17Jul2018-v1/MINIAOD +SingleMuon_Run2016G /SingleMuon/Run2016G-17Jul2018-v1/MINIAOD +SingleMuon_Run2016H /SingleMuon/Run2016H-17Jul2018-v1/MINIAOD diff --git a/TauTagAndProbe/data/2016/crab/TT.txt b/TauTagAndProbe/data/2016/crab/TT.txt new file mode 100644 index 00000000000..0cbb5e25ae4 --- /dev/null +++ b/TauTagAndProbe/data/2016/crab/TT.txt @@ -0,0 +1,4 @@ +period=Run2016 isMC=True runDeepTau=True + +TTTo2L2Nu_TuneCP5_PSweights_powheg_pythia8_2016 /TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3-v1/MINIAODSIM +TTToSemiLeptonic_TuneCP5_PSweights_powheg_pythia8_2016 /TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3-v1/MINIAODSIM diff --git a/TauTagAndProbe/data/2016/crab/W.txt b/TauTagAndProbe/data/2016/crab/W.txt new file mode 100644 index 00000000000..ea805311c4b --- /dev/null +++ b/TauTagAndProbe/data/2016/crab/W.txt @@ -0,0 +1,4 @@ +period=Run2016 isMC=True runDeepTau=True + +WJetsToLNu /WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3-v2/MINIAODSIM 
+WJetsToLNu_ext2 /WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3_ext2-v2/MINIAODSIM diff --git a/TauTagAndProbe/data/2016/triggers.json b/TauTagAndProbe/data/2016/triggers.json new file mode 100644 index 00000000000..cc589f74d53 --- /dev/null +++ b/TauTagAndProbe/data/2016/triggers.json @@ -0,0 +1,82 @@ +{ + "HLT_IsoMu27_v": { + "filters": [ [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07" ] ], + "leg_types": [ "mu" ], + "is_tag": 1 + }, + "HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTauJet20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19LooseIsoPFTau20" ], + [ "hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterIsoMu19LooseIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ], + "selection_etau": { "l1Tau_pt": 26, "l1Tau_hwIso": 0, "hltObj_pt": 30 }, + "target_channels": [ "etau", "mutau" ] + }, + "HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu18erIorSingleMu20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu19LooseIsoPFTau20" ], + [ "hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu19LooseIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ], + "target_channels": [ "etau", "mutau" ] + }, + "HLT_IsoMu19_eta2p1_LooseCombinedIsoPFTau20_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTauJet20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19LooseCombinedIsoPFTau20" ], + [ "hltPFTau20TrackLooseCombinedIsoAgainstMuon", "hltOverlapFilterIsoMu19LooseCombinedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19MediumIsoPFTau32Reg" ], + [ "hltPFTau32TrackPt1MediumIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19MediumIsoPFTau32Reg" ] + ], + "leg_types": [ "mu", "tau" ], + "target_channels": [ "ditau" ] + }, + "HLT_IsoMu19_eta2p1_MediumCombinedIsoPFTau32_Trk1_eta2p1_Reg_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19MediumCombinedIsoPFTau32Reg" ], + [ "hltPFTau32TrackPt1MediumCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19MediumCombinedIsoPFTau32Reg" ] + ], + "leg_types": [ "mu", "tau" ], + "target_channels": [ "ditau" ] + }, + "HLT_IsoMu19_eta2p1_TightCombinedIsoPFTau32_Trk1_eta2p1_Reg_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19TightCombinedIsoPFTau32Reg" ], + [ "hltPFTau32TrackPt1TightCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19TightCombinedIsoPFTau32Reg" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu20erIorSingleMu22erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu21LooseIsoPFTau20" ], + [ "hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu21LooseIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu21_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v": { + "filters": [ + [ "hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21MediumIsoPFTau32Reg" ], + [ "hltPFTau32TrackPt1MediumIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21MediumIsoPFTau32Reg" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu21_eta2p1_MediumCombinedIsoPFTau32_Trk1_eta2p1_Reg_v": { + "filters": 
[ + [ "hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21MediumCombinedIsoPFTau32Reg" ], + [ "hltPFTau32TrackPt1MediumCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21MediumCombinedIsoPFTau32Reg" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu21_eta2p1_TightCombinedIsoPFTau32_Trk1_eta2p1_Reg_v": { + "filters": [ + [ "hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21TightCombinedIsoPFTau32Reg" ], + [ "hltPFTau32TrackPt1TightCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21TightCombinedIsoPFTau32Reg" ] + ], + "leg_types": [ "mu", "tau" ] + } +} diff --git a/TauTagAndProbe/data/2017/Pileup_Data2017.root b/TauTagAndProbe/data/2017/Pileup_Data2017.root new file mode 100644 index 00000000000..3993c08dfdb Binary files /dev/null and b/TauTagAndProbe/data/2017/Pileup_Data2017.root differ diff --git a/TauTagAndProbe/data/2017/crab/DY.txt b/TauTagAndProbe/data/2017/crab/DY.txt new file mode 100644 index 00000000000..06e3c088d01 --- /dev/null +++ b/TauTagAndProbe/data/2017/crab/DY.txt @@ -0,0 +1,6 @@ +period=Run2017 isMC=True runDeepTau=True + +#DYJetsToLL_M-50-amcatnloFXFX /DYJetsToLL_M-50_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM +#DYJetsToLL_M-50-amcatnloFXFX_ext1 /DYJetsToLL_M-50_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14_ext1-v1/MINIAODSIM +DYJetsToLL_M-50 /DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017RECOSIMstep_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM +DYJetsToLL_M-50_ext1 /DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017RECOSIMstep_12Apr2018_94X_mc2017_realistic_v14_ext1-v1/MINIAODSIM diff --git a/TauTagAndProbe/data/2017/crab/Data_SingleMuon.txt b/TauTagAndProbe/data/2017/crab/Data_SingleMuon.txt new file mode 100644 index 00000000000..b0c6860cefd --- /dev/null +++ b/TauTagAndProbe/data/2017/crab/Data_SingleMuon.txt @@ -0,0 +1,8 @@ +lumiMask=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions17/13TeV/ReReco/Cert_294927-306462_13TeV_EOY2017ReReco_Collisions17_JSON.txt +period=Run2017 isMC=False runDeepTau=True + +SingleMuon_Run2017B /SingleMuon/Run2017B-31Mar2018-v1/MINIAOD +SingleMuon_Run2017C /SingleMuon/Run2017C-31Mar2018-v1/MINIAOD +SingleMuon_Run2017D /SingleMuon/Run2017D-31Mar2018-v1/MINIAOD +SingleMuon_Run2017E /SingleMuon/Run2017E-31Mar2018-v1/MINIAOD +SingleMuon_Run2017F /SingleMuon/Run2017F-31Mar2018-v1/MINIAOD diff --git a/TauTagAndProbe/data/2017/crab/TT.txt b/TauTagAndProbe/data/2017/crab/TT.txt new file mode 100644 index 00000000000..51632c8c6c2 --- /dev/null +++ b/TauTagAndProbe/data/2017/crab/TT.txt @@ -0,0 +1,5 @@ +period=Run2017 isMC=True runDeepTau=True + + +TTTo2L2Nu_TuneCP5_PSweights_powheg_pythia8_2017 /TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_new_pmx_94X_mc2017_realistic_v14-v2/MINIAODSIM +TTToSemiLeptonic_TuneCP5_PSweights_powheg_pythia8_2017 /TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM diff --git a/TauTagAndProbe/data/2017/crab/W.txt b/TauTagAndProbe/data/2017/crab/W.txt new file mode 100644 index 00000000000..79fca4f329b --- /dev/null +++ b/TauTagAndProbe/data/2017/crab/W.txt @@ -0,0 +1,3 @@ +period=Run2017 isMC=True runDeepTau=True + +WJetsToLNu_ext1 
/WJetsToLNu_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14_ext1-v2/MINIAODSIM diff --git a/TauTagAndProbe/data/2017/triggers.json b/TauTagAndProbe/data/2017/triggers.json new file mode 100644 index 00000000000..ff41d67c085 --- /dev/null +++ b/TauTagAndProbe/data/2017/triggers.json @@ -0,0 +1,184 @@ +{ + "HLT_IsoMu27_v": { + "filters": [ [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07" ] ], + "leg_types": [ "mu" ], + "is_tag": 1 + }, + "HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau20" ], + [ "hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_TightID_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoTightOOSCPhotonsPFTau20" ], + [ "hltPFTau20TrackLooseChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24LooseChargedIsoTightOOSCPhotonsPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau20" ], + [ "hltPFTau20TrackMediumChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_TightID_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoTightOOSCPhotonsPFTau20" ], + [ "hltPFTau20TrackMediumChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24MediumChargedIsoTightOOSCPhotonsPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau20" ], + [ "hltPFTau20TrackTightChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24TightChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_TightID_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoTightOOSCPhotonsPFTau20" ], + [ "hltPFTau20TrackTightChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24TightChargedIsoTightOOSCPhotonsPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1LooseChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + }, + "HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1LooseChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24LooseChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" 
] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32, "hltObj_pt": 40 }, + "target_channels": [ "ditau" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32, "hltObj_pt": 40 }, + "target_channels": [ "ditau" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 }, + "target_channels": [ "ditau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v": { + "filters": [ + [ "hltL3crIsoL1sMu22erIsoTau40erL1f0L2f10QL3f20QL3trkIsoFiltered0p07" ], + [ "hltSelectedPFTau50MediumChargedIsolationL1HLTMatchedMu22IsoTau40" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + }, + "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded" ], + [ "hltSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ], + "selection_etau": { "l1Tau_pt": 26, "l1Tau_hwIso": 0, "hltObj_pt": 30 }, + "target_channels": [ "etau", "mutau" ] + }, + "HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded" ], + [ "hltSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", 
"hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded" ], + [ "hltSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau40_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau40MonitoringReg" ], + [ "hltSelectedPFTau40TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau40MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg" ], + [ "hltSelectedPFTau40TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau40_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau40MonitoringReg" ], + [ "hltSelectedPFTau40TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau40MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg" ], + [ "hltSelectedPFTau40TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "selection": { "l1Tau_pt": 32 } + } +} diff --git a/TauTagAndProbe/data/2018/Pileup_Data2018.root 
b/TauTagAndProbe/data/2018/Pileup_Data2018.root new file mode 100644 index 00000000000..6ee4a093910 Binary files /dev/null and b/TauTagAndProbe/data/2018/Pileup_Data2018.root differ diff --git a/TauTagAndProbe/data/2018/crab/DY.txt b/TauTagAndProbe/data/2018/crab/DY.txt new file mode 100644 index 00000000000..679174b1553 --- /dev/null +++ b/TauTagAndProbe/data/2018/crab/DY.txt @@ -0,0 +1,5 @@ +period=Run2018 isMC=True runDeepTau=True + +#DYJetsToLL_M-50-amcatnloFXFX /DYJetsToLL_M-50_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1/MINIAODSIM +#DYJetsToLL_M-50-amcatnloFXFX_ext2 /DYJetsToLL_M-50_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15_ext2-v1/MINIAODSIM +DYJetsToLL_M-50 /DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1/MINIAODSIM diff --git a/TauTagAndProbe/data/2018/crab/Data_SingleMuon_ABC.txt b/TauTagAndProbe/data/2018/crab/Data_SingleMuon_ABC.txt new file mode 100644 index 00000000000..753e25589c2 --- /dev/null +++ b/TauTagAndProbe/data/2018/crab/Data_SingleMuon_ABC.txt @@ -0,0 +1,6 @@ +lumiMask=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/ReReco/Cert_314472-325175_13TeV_17SeptEarlyReReco2018ABC_PromptEraD_Collisions18_JSON.txt +period=Run2018ABC isMC=False runDeepTau=True + +SingleMuon_Run2018A /SingleMuon/Run2018A-17Sep2018-v2/MINIAOD +SingleMuon_Run2018B /SingleMuon/Run2018B-17Sep2018-v1/MINIAOD +SingleMuon_Run2018C /SingleMuon/Run2018C-17Sep2018-v1/MINIAOD diff --git a/TauTagAndProbe/data/2018/crab/Data_SingleMuon_D.txt b/TauTagAndProbe/data/2018/crab/Data_SingleMuon_D.txt new file mode 100644 index 00000000000..cbf97bf3a1c --- /dev/null +++ b/TauTagAndProbe/data/2018/crab/Data_SingleMuon_D.txt @@ -0,0 +1,4 @@ +lumiMask=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/ReReco/Cert_314472-325175_13TeV_17SeptEarlyReReco2018ABC_PromptEraD_Collisions18_JSON.txt +period=Run2018D isMC=False runDeepTau=True + +SingleMuon_Run2018D /SingleMuon/Run2018D-22Jan2019-v2/MINIAOD diff --git a/TauTagAndProbe/data/2018/crab/TT.txt b/TauTagAndProbe/data/2018/crab/TT.txt new file mode 100644 index 00000000000..036e8fb96e0 --- /dev/null +++ b/TauTagAndProbe/data/2018/crab/TT.txt @@ -0,0 +1,4 @@ +period=Run2018 isMC=True runDeepTau=True + +TTTo2L2Nu_TuneCP5_powheg_pythia8_2018 /TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1/MINIAODSIM +TTToSemiLeptonic_TuneCP5_powheg_pythia8_2018 /TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1/MINIAODSIM diff --git a/TauTagAndProbe/data/2018/crab/W.txt b/TauTagAndProbe/data/2018/crab/W.txt new file mode 100644 index 00000000000..8384fcd1faf --- /dev/null +++ b/TauTagAndProbe/data/2018/crab/W.txt @@ -0,0 +1,3 @@ +period=Run2018 isMC=True runDeepTau=True + +WJetsToLNu /WJetsToLNu_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v2/MINIAODSIM diff --git a/TauTagAndProbe/data/2018/triggers.json b/TauTagAndProbe/data/2018/triggers.json new file mode 100644 index 00000000000..083f664c45e --- /dev/null +++ b/TauTagAndProbe/data/2018/triggers.json @@ -0,0 +1,224 @@ +{ + "HLT_IsoMu27_v": { + "filters": [ [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07" ] ], + "leg_types": [ "mu" ], + "is_tag": 1 + }, + "HLT_IsoMu27_LooseChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", 
"hltOverlapFilterIsoMu27LooseChargedIsoPFTau20" ], + [ "hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu27_MediumChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20" ], + [ "hltPFTau20TrackMediumChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu27_TightChargedIsoPFTau20_Trk1_eta2p1_SingleL_v": { + "filters": [ + [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20" ], + [ "hltPFTau20TrackTightChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "target_channels": [ "ditau" ], + "selection_ditau": { "hltObj_pt": 40 }, + "max_run": 317509, + "sample_types": [ "data" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "target_channels": [ "ditau" ], + "selection_ditau": { "hltObj_pt": 40 }, + "max_run": 317509, + "sample_types": [ "data" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ], + [ "hltSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "target_channels": [ "ditau" ], + "max_run": 317509, + "sample_types": [ "data" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v": { + "filters": [ + [ "hltL3crIsoL1sMu22erIsoTau40erL1f0L2f10QL3f24QL3trkIsoFiltered0p07" ], + [ "hltSelectedPFTau50MediumChargedIsolationL1HLTMatchedMu22IsoTau40" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded" ], + [ "hltSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", 
"hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ], + "selection_etau": { "l1Tau_pt": 26, "l1Tau_hwIso": 0, "hltObj_pt": 30 }, + "target_channels": [ "etau", "mutau" ], + "max_run": 317509, + "sample_types": [ "data" ] + }, + "HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded" ], + [ "hltSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded" ], + [ "hltSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded" ], + [ "hltHpsSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ], + "selection_etau": { "l1Tau_pt": 26, "l1Tau_hwIso": 0, "hltObj_pt": 30 }, + "target_channels": [ "etau", "mutau" ], + "min_run": 317509, + "sample_types": [ "data", "ztt_mc", "zmm_mc","w_mc", "ttbar_mc", "qcd" ] + }, + "HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded" ], + [ "hltHpsSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_CrossL1_v": { + "filters": [ + [ 
"hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded" ], + [ "hltHpsSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltHpsSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltHpsSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ], + [ "hltHpsSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg" ], + [ "hltHpsSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ], + "target_channels": [ "ditau" ], + "min_run": 317509, + "sample_types": [ "data", "ztt_mc", "zmm_mc","w_mc", "ttbar_mc", "qcd" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg" ], + [ "hltHpsSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ], + [ "hltHpsSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu24_eta2p1_TightChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_CrossL1_v": { + "filters": [ + [ "hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ], + [ "hltHpsSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", 
"hltHpsOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu27_LooseChargedIsoPFTauHPS20_Trk1_eta2p1_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu27LooseChargedIsoPFTau20" ], + [ "hltHpsPFTau20TrackLooseChargedIsoAgainstMuon", "hltHpsOverlapFilterIsoMu27LooseChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu27_MediumChargedIsoPFTauHPS20_Trk1_eta2p1_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu27MediumChargedIsoPFTau20" ], + [ "hltHpsPFTau20TrackMediumChargedIsoAgainstMuon", "hltHpsOverlapFilterIsoMu27MediumChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_IsoMu27_TightChargedIsoPFTauHPS20_Trk1_eta2p1_SingleL1_v": { + "filters": [ + [ "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu27TightChargedIsoPFTau20" ], + [ "hltHpsPFTau20TrackTightChargedIsoAgainstMuon", "hltHpsOverlapFilterIsoMu27TightChargedIsoPFTau20" ] + ], + "leg_types": [ "mu", "tau" ] + }, + "HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_v": { + "filters": [ + [ "hltSelectedPFTau180MediumChargedIsolationL1HLTMatched" ] + ], + "leg_types": [ "tau" ] + } +} diff --git a/TauTagAndProbe/interface/EventTuple.h b/TauTagAndProbe/interface/EventTuple.h new file mode 100644 index 00000000000..203a20f5302 --- /dev/null +++ b/TauTagAndProbe/interface/EventTuple.h @@ -0,0 +1,103 @@ +/*! Definition of a tuple with all event information that is required for the tau analysis. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#pragma once + +#include "TauTriggerTools/Common/interface/SmartTree.h" +#include "TauTriggerTools/Common/interface/TauIdResults.h" +#include + +#define TAU_ID(name, pattern, has_raw, wp_list) VAR(uint16_t, name) VAR(Float_t, name##raw) + +#define VAR2(type, name1, name2) VAR(type, name1) VAR(type, name2) +#define VAR3(type, name1, name2, name3) VAR2(type, name1, name2) VAR(type, name3) +#define VAR4(type, name1, name2, name3, name4) VAR3(type, name1, name2, name3) VAR(type, name4) + +#define EVENT_DATA() \ + /* Event Variables */ \ + VAR(UInt_t, run) /* run number */ \ + VAR(UInt_t, lumi) /* lumi section */ \ + VAR(ULong64_t, evt) /* event number */ \ + VAR(Int_t, npv) /* number of primary vertices */ \ + VAR(Float_t, genEventWeight) /* gen event weight */ \ + VAR(Float_t, npu) /* number of in-time pu interactions added to the event */ \ + /* PF MET variables */ \ + VAR2(Float_t, met_pt, met_phi) /* pt and phi of the MET */ \ + /* Tag muon variables */ \ + VAR4(Float_t, muon_pt, muon_eta, muon_phi, muon_mass) /* 4-momentum of the muon */ \ + VAR(Int_t, muon_charge) /* muon charge */ \ + VAR(Float_t, muon_iso) /* muon pfRel isolation */ \ + VAR(Float_t, muon_mt) /* muon transverse mass */ \ + VAR(Int_t, muon_gen_match) /* matching of the muon with leptons on the generator level: + Electron = 1, Muon = 2, TauElectron = 3, TauMuon = 4, Tau = 5, NoMatch = 6 */ \ + VAR(Int_t, muon_gen_charge) /* charge of the gen lepton that was matched with the muon */ \ + VAR4(Float_t, muon_gen_vis_pt, muon_gen_vis_eta, muon_gen_vis_phi, muon_gen_vis_mass) /* visible 4-momentum of the + gen lepton that was matched with the muon */ \ + /* Basic tau variables */ \ + VAR(UInt_t, tau_sel) /* how tau was selected */ \ + VAR4(Float_t, tau_pt, tau_eta, tau_phi, tau_mass) /* 4-momentum of the tau */ \ + 
VAR(Int_t, tau_charge) /* tau charge */ \ + VAR(Int_t, tau_gen_match) /* matching of the tau with leptons on the generator level: + Electron = 1, Muon = 2, TauElectron = 3, TauMuon = 4, Tau = 5, NoMatch = 6 */ \ + VAR(Int_t, tau_gen_charge) /* charge of the gen lepton that was matched with the tau */ \ + VAR4(Float_t, tau_gen_vis_pt, tau_gen_vis_eta, tau_gen_vis_phi, tau_gen_vis_mass) /* visible 4-momentum of the + gen lepton that was matched with the tau */ \ + VAR4(Float_t, tau_gen_rad_pt, tau_gen_rad_eta, tau_gen_rad_phi, tau_gen_rad_energy) /* visible 4-momentum of the + initial state radiation emitted by the gen tau */ \ + VAR4(Int_t, tau_gen_n_charged_hadrons, tau_gen_n_neutral_hadrons, tau_gen_n_gammas, tau_gen_n_gammas_rad) /* + number of charged and neutral hadrons, gammas and initial state radiation gammas produced by the tau + decay at the generator level */ \ + /* Tau ID variables */ \ + VAR(Int_t, tau_decayMode) /* tau decay mode */ \ + VAR(Int_t, tau_oldDecayModeFinding) /* tau passed the old decay mode finding requirements */ \ + TAU_IDS() \ + /* Tau transverse impact parameters. + See cmssw/RecoTauTag/RecoTau/plugins/PFTauTransverseImpactParameters.cc for details */ \ + VAR(Float_t, tau_dxy) /* tau signed transverse impact parameter wrt the primary vertex */ \ + VAR(Float_t, tau_dxy_error) /* uncertainty of the transverse impact parameter measurement */ \ + VAR(Float_t, tau_ip3d) /* tau signed 3D impact parameter wrt the primary vertex */ \ + VAR(Float_t, tau_ip3d_error) /* uncertainty of the 3D impact parameter measurement */ \ + VAR(Float_t, tau_dz) /* tau dz of the leadChargedHadrCand wrt the primary vertex */ \ + VAR(Float_t, tau_dz_error) /* uncertainty of the tau dz measurement */ \ + /* mu-tau variables */ \ + VAR(Float_t, vis_mass) /* visible mu-tau mass */ \ + /* HLT results and objects */ \ + VAR(ULong64_t, hlt_accept) /* HLT accept bits */ \ + VAR(ULong64_t, hlt_acceptAndMatch) /* HLT accept & match bits */ \ + VAR(std::vector, hltObj_types) /* types of the HLT object */ \ + VAR4(std::vector, hltObj_pt, hltObj_eta, hltObj_phi, hltObj_mass) /* 4-momentum of the HLT object */ \ + VAR(std::vector, hltObj_hasPathName) /* whether the HLT object has a path name */ \ + VAR(std::vector, hltObj_isBestMatch) /* whether the HLT object is the best match for a path name */ \ + VAR(std::vector, hltObj_miniAODIndex) /* index of the HLT object record in the MiniAOD */ \ + /* HLT filters */ \ + VAR(std::vector, filter_hltObj) /* index of the HLT object */ \ + VAR(std::vector, filter_hash) /* hash of the name of the filter */ \ + /* Matched L1 tau */ \ + VAR4(Float_t, l1Tau_pt, l1Tau_eta, l1Tau_phi, l1Tau_mass) /* 4-momentum of the L1 tau */ \ + VAR(Int_t, l1Tau_hwIso) /* integer "hardware" isolation value of the L1 tau */ \ + VAR(Int_t, l1Tau_hwQual) /* integer "hardware" quality value of the L1 tau */ \ + /**/ + +#define VAR(type, name) DECLARE_BRANCH_VARIABLE(type, name) +DECLARE_TREE(tau_trigger, Event, EventTuple, EVENT_DATA, "events") +#undef VAR + +#define VAR(type, name) ADD_DATA_TREE_BRANCH(name) +INITIALIZE_TREE(tau_trigger, EventTuple, EVENT_DATA) +#undef VAR +#undef VAR2 +#undef VAR3 +#undef VAR4 +#undef EVENT_DATA +#undef TAU_ID + +namespace tau_trigger { + +template<typename T> +constexpr T DefaultFillValue() { return std::numeric_limits<T>::lowest(); } +template<> +constexpr float DefaultFillValue<float>() { return -999.; } +template<> +constexpr int DefaultFillValue<int>() { return -999; } + +} // namespace tau_trigger diff --git a/TauTagAndProbe/interface/GenHelper.h
b/TauTagAndProbe/interface/GenHelper.h deleted file mode 100644 index c0917b61789..00000000000 --- a/TauTagAndProbe/interface/GenHelper.h +++ /dev/null @@ -1,70 +0,0 @@ -/* -** -** Helpers for gen info -** -** -** \date: 13 May 2015 -** \author: L. Cadamuro (LLR) -*/ - -#ifndef GenHelper_h -#define GenHelper_h - -#include "TVector3.h" -#include "TLorentzVector.h" - -#include -#include - -namespace genhelper { - - enum HZDecay { - MuHad = 0, - EHad = 1, - HadHad = 2, - MuMu = 3, - EE = 4, - EMu = 5, - EEPrompt = 6, // prompt Z->ee/mumu decays - MuMuPrompt = 7, - Other = 8 // for e.g. h->bb - }; - - enum WDecay { - Had = 0, // W->qqbar - MuPrompt = 1, - EPrompt = 2, - TauMu = 3, // W->tau->mu - TauE = 4, // W->tau->e - TauHad = 5, // W->tau->tauh - other = 6 - }; - - bool IsLastCopy (const reco::GenParticle& part); // return true if particle has no sons with its same pdgId to reject showering clones - bool IsFirstCopy (const reco::GenParticle& part, const bool checkAbsPdg = false); // return true if particle has no mothers with its same pdgId to handle showering clones - - int GetTauDecay (const reco::GenParticle& part); // 0: tau->mu; 1: tau->ele; 2: tau->had - int GetTauDecay (const reco::Candidate* part); // 0: tau->mu; 1: tau->ele; 2: tau->had - - const reco::Candidate* GetFirstCopy (const reco::Candidate* part); // follow all the replicated particle chain until the first clone - const reco::Candidate* GetLastCopy (const reco::Candidate* part); // follow all the replicated particle chain until the last clone - HZDecay GetHZDecay (const reco::Candidate* part); // return final state for H/Z -> see enum for code - WDecay GetWDecay (const reco::Candidate* part); // return final state for W -> see enum for code - WDecay GetTopDecay (const reco::Candidate* part); // return final state for top (= final state for W) -> see enum for code - - reco::GenParticle GetTauHad (const reco::Candidate* part); // build had tau by summing sons without nu - reco::GenParticle GetTauHadNeutrals (const reco::Candidate* part); // build neutral component of had tau by summing sons without nu - - const reco::Candidate* IsFromID (const reco::Candidate* part, int targetPDGId); // find if is son of a certain particle (select by targetPDGId); if not found, return NULL, else return its pointer - int GetIndexInOutput (const reco::Candidate* part, std::vector cands); - - typedef reco::GenParticleCollection::const_iterator IG; - typedef reco::GenParticleRefVector::const_iterator IGR; - TVector3 ImpactParameter(const TVector3& pv, const TVector3& sv, const TLorentzVector& p4);//Calculate generator level impact parameter - void GetTausDaughters(const reco::GenParticle& tau, reco::GenParticleRefVector& products, bool ignoreNus, bool direct); - void FindDescendents(const reco::GenParticle& base, reco::GenParticleRefVector& descendents, int status, int pdgId=0, bool skipPhotonsPi0AndFSR=false); - const reco::GenParticleRef GetLeadChParticle(const reco::GenParticleRefVector& products); - int getDetailedTauDecayMode(const reco::GenParticleRefVector& products); - -} -#endif diff --git a/TauTagAndProbe/interface/PyInterface.h b/TauTagAndProbe/interface/PyInterface.h new file mode 100644 index 00000000000..0c984fe0bea --- /dev/null +++ b/TauTagAndProbe/interface/PyInterface.h @@ -0,0 +1,301 @@ +/*! Definition of c++ methods used in python code. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ + +#pragma once + +enum class LegType { e = 1, mu = 2, tau = 4, jet = 8 }; + +class TriggerMatchProvider { +public: + struct MatchDescriptor { + ULong64_t match_mask{0}; + std::vector filter_hashes; + int min_run{-1}, max_run{-1}; + float hltObj_pt{-1}, l1Tau_pt{-1}; + int l1Tau_hwIso{-1}; + + MatchDescriptor() {} + MatchDescriptor(ULong64_t _match_mask, const std::vector& _filter_hashes, int _min_run, int _max_run, + float _hltObj_pt, float _l1Tau_pt, float _l1Tau_hwIso) : + match_mask(_match_mask), filter_hashes(_filter_hashes), min_run(_min_run), max_run(_max_run), + hltObj_pt(_hltObj_pt), l1Tau_pt(_l1Tau_pt), l1Tau_hwIso(_l1Tau_hwIso) + { + } + }; + + void Add(int channel_id, const MatchDescriptor& desc) + { + channel_matches[channel_id].push_back(desc); + } + + bool Pass(int channel_id, UInt_t run, float tau_eta, float tau_phi, ULong64_t hlt_accept, float deltaRThr, + const ROOT::VecOps::RVec& hltObj_types, const ROOT::VecOps::RVec& hltObj_pt, + const ROOT::VecOps::RVec& hltObj_eta, const ROOT::VecOps::RVec& hltObj_phi, + const ROOT::VecOps::RVec& hltObj_hasPathName, const ROOT::VecOps::RVec& filter_hltObj, + const ROOT::VecOps::RVec& filter_hash, float l1Tau_pt, Int_t l1Tau_hwIso) const + { + const auto desc_iter = channel_matches.find(channel_id); + if(desc_iter != channel_matches.end()) { + const float deltaRThr2 = std::pow(deltaRThr, 2); + for(const MatchDescriptor& match_desc : desc_iter->second) { + if((hlt_accept & match_desc.match_mask) == 0) continue; + if(match_desc.min_run >= 0 && run < match_desc.min_run) continue; + if(match_desc.max_run >= 0 && run >= match_desc.max_run) continue; + if(match_desc.l1Tau_pt >= 0 && l1Tau_pt <= match_desc.l1Tau_pt) continue; + if(match_desc.l1Tau_hwIso >= 0 && l1Tau_hwIso <= match_desc.l1Tau_hwIso) continue; + + for(size_t n = 0; n < hltObj_pt.size(); ++n) { + if((hltObj_types.at(n) & static_cast(LegType::tau)) == 0) continue; + if((hltObj_hasPathName.at(n) & match_desc.match_mask) == 0) continue; + if(match_desc.hltObj_pt >= 0 && hltObj_pt.at(n) <= match_desc.hltObj_pt) continue; + const float deta = tau_eta - hltObj_eta.at(n); + const float dphi = ROOT::Math::VectorUtil::Phi_mpi_pi(tau_phi - hltObj_phi.at(n)); + const float deltaR2 = std::pow(deta, 2) + std::pow(dphi, 2); + if(deltaR2 >= deltaRThr2) continue; + if(PassFilters(match_desc.filter_hashes, n, filter_hltObj, filter_hash)) return true; + } + } + } + return false; + } + + static TriggerMatchProvider& Initialize() + { + default_provider.reset(new TriggerMatchProvider()); + return GetDefault(); + } + + static TriggerMatchProvider& GetDefault() + { + if(!default_provider) + throw std::runtime_error("Default TriggerMatchProvider is not initialized."); + return *default_provider; + } + +private: + static std::unique_ptr default_provider; + + bool PassFilters(const std::vector& filter_hashes, size_t hltObj_index, + const ROOT::VecOps::RVec& filter_hltObj, + const ROOT::VecOps::RVec& filter_hash) const + { + for(UInt_t filter_ref : filter_hashes) { + bool filter_found = false; + for(size_t n = 0; n < filter_hltObj.size() && !filter_found; ++n) { + filter_found = filter_hltObj.at(n) == hltObj_index && filter_hash.at(n) == filter_ref; + } + if(!filter_found) return false; + } + return true; + } + +private: + std::map> channel_matches; +}; + +class PileUpWeightProvider { +public: + PileUpWeightProvider(const TH1D& data_pu_orig, const TH1D& mc_pu_orig) + { + TH1D data_pu(data_pu_orig); + data_pu.Scale(1. / data_pu.Integral()); + TH1D mc_pu(mc_pu_orig); + mc_pu.Scale(1. 
/ mc_pu.Integral()); + ratio.reset(new TH1D(data_pu)); + ratio->Divide(&mc_pu); + } + + float GetWeight(int npu) const + { + int bin = ratio->FindBin(npu); + if(bin < 1 || bin > ratio->GetNbinsX()) + return 0; + return ratio->GetBinContent(bin); + } + + static void Initialize(const TH1D& data_pu, const TH1D& mc_pu) + { + default_provider.reset(new PileUpWeightProvider(data_pu, mc_pu)); + } + + static const PileUpWeightProvider& GetDefault() + { + if(!default_provider) + throw std::runtime_error("Default PileUpWeightProvider is not initialized."); + return *default_provider; + } + +private: + static std::unique_ptr default_provider; + +private: + std::unique_ptr ratio; +}; + +std::unique_ptr TriggerMatchProvider::default_provider; +std::unique_ptr PileUpWeightProvider::default_provider; + +//---------------------------------------------------------------------------------------------------- +// define integer constants +// +// WARNING: the definition of these constants needs to match the definition in TauTriggerTools/TauTagAndProbe/python/estimateBackgrounds.py !! +// +const int type_data = 0; +const int type_ztt_mc = 1; +const int type_zmm_mc = 2; +const int type_w_mc = 3; +const int type_ttbar_mc = 4; + +const int selection_OS_low_mT = 0; +const int selection_OS_high_mT = 1; +const int selection_SS_low_mT = 2; +const int selection_SS_high_mT = 3; +//---------------------------------------------------------------------------------------------------- + +class final_weight_data +{ +public: + final_weight_data(float sf_qcd_SS_to_OS, float sf_w_mc_OS, float sf_w_mc_SS) + : sf_qcd_SS_to_OS_(sf_qcd_SS_to_OS) + , sf_w_mc_OS_(sf_w_mc_OS) + , sf_w_mc_SS_(sf_w_mc_SS) + {} + + float operator()(int selection, int type, float weight) const + { + float final_weight = 0.; + if ( selection == selection_OS_low_mT && type == type_data ) + { + final_weight = 1.; + } + else if ( selection == selection_SS_low_mT ) + { + final_weight = sf_qcd_SS_to_OS_; + if ( type == type_data ) + { + final_weight *= -1.; + } + else + { + final_weight *= +1. * weight; + if ( type == type_w_mc ) + { + final_weight *= sf_w_mc_SS_; + } + } + } + else if ( selection == selection_OS_low_mT && (type == type_zmm_mc || type == type_w_mc || type == type_ttbar_mc) ) + { + final_weight = -1. * weight; + if ( type == type_w_mc ) + { + final_weight *= sf_w_mc_OS_; + } + } + else + { + ostringstream error_message; + error_message << "Invalid function arguments: selection = '" << selection << "', type = '" << type << "' !!"; + throw std::runtime_error(error_message.str()); + } + return final_weight; + } + + static final_weight_data& Initialize(float sf_qcd_SS_to_OS, float sf_w_mc_OS, float sf_w_mc_SS) + { + default_provider.reset(new final_weight_data(sf_qcd_SS_to_OS, sf_w_mc_OS, sf_w_mc_SS)); + return GetDefault(); + } + + static final_weight_data& GetDefault() + { + if(!default_provider) + throw std::runtime_error("Default final_weight_data is not initialized."); + return *default_provider; + } + + +private: + static std::unique_ptr default_provider; + +private: + float sf_qcd_SS_to_OS_; + float sf_w_mc_OS_; + float sf_w_mc_SS_; +}; + +class final_weight_dy_mc +{ +public: + final_weight_dy_mc(float sf_qcd_SS_to_OS, float sf_w_mc_OS, float sf_w_mc_SS) + : sf_qcd_SS_to_OS_(sf_qcd_SS_to_OS) + , sf_w_mc_OS_(sf_w_mc_OS) + , sf_w_mc_SS_(sf_w_mc_SS) + {} + + float operator()(int selection, int type, float weight) const + { + float final_weight = 0.; + if ( selection == selection_OS_low_mT and type == type_ztt_mc ) + { + final_weight = +1. 
* weight; + } + else if ( selection == selection_SS_low_mT ) + { + final_weight = sf_qcd_SS_to_OS_; + if ( type == type_data ) + { + final_weight *= +1.; + } + else + { + final_weight *= -1. * weight; + if ( type == type_w_mc ) + { + final_weight *= sf_w_mc_SS_; + } + } + } + else if ( selection == selection_OS_low_mT && (type == type_zmm_mc || type == type_w_mc || type == type_ttbar_mc) ) + { + final_weight = +1. * weight; + if ( type == type_w_mc ) + { + final_weight *= sf_w_mc_OS_; + } + } + else + { + ostringstream error_message; + error_message << "Invalid function arguments: selection = '" << selection << "', type = '" << type << "' !!"; + throw std::runtime_error(error_message.str()); + } + return final_weight; + } + + static final_weight_dy_mc& Initialize(float sf_qcd_SS_to_OS, float sf_w_mc_OS, float sf_w_mc_SS) + { + default_provider.reset(new final_weight_dy_mc(sf_qcd_SS_to_OS, sf_w_mc_OS, sf_w_mc_SS)); + return GetDefault(); + } + + static final_weight_dy_mc& GetDefault() + { + if(!default_provider) + throw std::runtime_error("Default final_weight_dy_mc is not initialized."); + return *default_provider; + } + + +private: + static std::unique_ptr default_provider; + +private: + float sf_qcd_SS_to_OS_; + float sf_w_mc_OS_; + float sf_w_mc_SS_; +}; + +std::unique_ptr final_weight_data::default_provider; +std::unique_ptr final_weight_dy_mc::default_provider; + diff --git a/TauTagAndProbe/interface/SummaryTuple.h b/TauTagAndProbe/interface/SummaryTuple.h new file mode 100644 index 00000000000..94e0d8b02d9 --- /dev/null +++ b/TauTagAndProbe/interface/SummaryTuple.h @@ -0,0 +1,118 @@ +/*! Definition of a tuple with summary information about production. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. */ + +#pragma once + +#include "EventTuple.h" +#include "TauTriggerTools/Common/interface/Tools.h" + +#define SUMMARY_DATA() \ + /* Run statistics */ \ + VAR(UInt_t, exeTime) \ + VAR(ULong64_t, numberOfProcessedEvents) \ + VAR(Double_t, totalGenEventWeight) \ + /* Trigger information */ \ + VAR(std::vector, trigger_index) \ + VAR(std::vector, trigger_pattern) \ + /* Filter information */ \ + VAR(std::vector, filter_name) \ + VAR(std::vector, filter_hash) \ + /**/ + +#define VAR(type, name) DECLARE_BRANCH_VARIABLE(type, name) +DECLARE_TREE(tau_trigger, ProdSummary, SummaryTuple, SUMMARY_DATA, "summary") +#undef VAR + +#define VAR(type, name) ADD_DATA_TREE_BRANCH(name) +INITIALIZE_TREE(tau_trigger, SummaryTuple, SUMMARY_DATA) +#undef VAR +#undef SUMMARY_DATA + + +#define EVENT_EXPRESS_DATA() \ + VAR(UInt_t, run) /* run */ \ + VAR(UInt_t, lumi) /* lumi section */ \ + VAR(ULong64_t, evt) /* event number */ \ + VAR(Int_t, npv) /* number of primary vertices */ \ + VAR(Float_t, genEventWeight) /* gen event weight */ \ + VAR(Float_t, npu) /* Number of in-time pu interactions added to the event */ \ + /**/ + +#define VAR(type, name) DECLARE_BRANCH_VARIABLE(type, name) +DECLARE_TREE(tau_trigger, ExpressEvent, ExpressTuple, EVENT_EXPRESS_DATA, "all_events") +#undef VAR + +#define VAR(type, name) ADD_DATA_TREE_BRANCH(name) +INITIALIZE_TREE(tau_trigger, ExpressTuple, EVENT_EXPRESS_DATA) +#undef VAR +#undef EVENT_EXPRESS_DATA + +namespace tau_trigger { + +struct SummaryProducerData { +public: + using Mutex = SummaryTuple::Mutex; + using LockGuard = std::lock_guard; + using clock = std::chrono::system_clock; + + static SummaryProducerData& GetData() + { + if(GetDataPtr() == nullptr) + throw analysis::exception("SummaryProducerData is not initialized."); + return *GetDataPtr(); + } + 
+private: + static SummaryProducerData*& GetDataPtr() + { + static SummaryProducerData* data = nullptr; + return data; + } + +public: + SummaryProducerData(TFile& file, bool createExpressTuple) : + start(clock::now()), summaryTuple(std::make_unique("summary", &file, false)) + { + if(createExpressTuple) + expressTuple = std::make_unique("all_events", &file, false); + if(GetDataPtr() != nullptr) + throw analysis::exception("Having multiple instances of SummaryProducerData is not supported."); + GetDataPtr() = this; + } + + SummaryTuple* getSummaryTuple() const { return summaryTuple.get(); } + ExpressTuple* getExpressTuple() const { return expressTuple.get(); } + Mutex& getMutex() const { return summaryTuple->GetMutex(); } + + unsigned getElapsedTime() const + { + return std::chrono::duration_cast(clock::now() - start).count(); + } + + uint32_t getFilterHash(const std::string& filterName) + { + LockGuard lock(getMutex()); + auto iter = filterNameToHash.find(filterName); + if(iter == filterNameToHash.end()) { + const uint32_t hash = analysis::tools::hash(filterName); + if(filterHashToName.count(hash)) + throw analysis::exception("Duplicated hash = %1% for filters '%2%' and '%3%'.") % hash % filterName + % filterHashToName.at(hash); + filterNameToHash[filterName] = hash; + filterHashToName[hash] = filterName; + iter = filterNameToHash.find(filterName); + } + return iter->second; + } + + const std::map& getFilters() const { return filterNameToHash; } + +private: + const clock::time_point start; + std::unique_ptr summaryTuple; + std::unique_ptr expressTuple; + std::map filterNameToHash; + std::map filterHashToName; +}; + +} \ No newline at end of file diff --git a/TauTagAndProbe/plugins/BuildFile.xml b/TauTagAndProbe/plugins/BuildFile.xml index e2a41e2519d..da41395aab8 100644 --- a/TauTagAndProbe/plugins/BuildFile.xml +++ b/TauTagAndProbe/plugins/BuildFile.xml @@ -1,23 +1,3 @@ - - @@ -29,14 +9,6 @@ - + - - diff --git a/TauTagAndProbe/plugins/GenFiller.cc b/TauTagAndProbe/plugins/GenFiller.cc deleted file mode 100644 index b357987b9aa..00000000000 --- a/TauTagAndProbe/plugins/GenFiller.cc +++ /dev/null @@ -1,359 +0,0 @@ -/* \class GenFiller -** -** Create a collection of filtered gen level objects -** (including the creation of hadronic taus) -** -** \date: 13 May 2015 -** \author: L. 
Cadamuro (LLR) -*/ - -#define DEBUG false - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "TauTriggerTools/TauTagAndProbe/interface/GenHelper.h" - -#include - -using namespace edm; -using namespace std; -using namespace reco; - -class GenFiller : public edm::EDProducer { - public: - /// Constructor - explicit GenFiller(const edm::ParameterSet&); - /// Destructor - ~GenFiller(){}; - - private: - virtual void beginJob(){}; - virtual void produce(edm::Event&, const edm::EventSetup&); - virtual void endJob(){}; - - bool IsInteresting (const GenParticle& p); - int makeFlagVector (const GenParticle* p); // put all gen flags in a single int word - bool isVBFParton(const GenParticle& p); - - //edm::InputTag src_; - edm::EDGetTokenT > src_; - std::vector cands_; - //std::vector tauHadcands_; // gen H tau build in this class - std::vector tauHadcandsMothers_; // contains the index in the cands_ vector of the tauh mother - const bool storeLightFlavAndGlu_; -}; - -// ------------------------------------------------------------------ - -GenFiller::GenFiller(const edm::ParameterSet& iConfig): -src_(consumes >(iConfig.getParameter("src"))), -storeLightFlavAndGlu_(iConfig.getParameter("storeLightFlavAndGlu")) -{ - //src_ = iConfig.getParameter("src"); - produces(); -} - -void GenFiller::produce(edm::Event& iEvent, const edm::EventSetup& iSetup) -{ - Handle > genHandle; - iEvent.getByToken (src_, genHandle); - cands_.clear(); - tauHadcandsMothers_.clear(); - - // output collection - std::unique_ptr result( new pat::GenericParticleCollection ); - - unsigned int Ngen = genHandle->size(); - - // fill vector of interesting Candidate* object, also used later for indexes - // tau decays are analyzed here to build tauH candidates and put them in the list - for (unsigned int iGen = 0; iGen < Ngen; iGen++) - { - const GenParticle& genP = (*genHandle)[iGen]; - if (IsInteresting (genP)) - { - cands_.push_back (&genP); - } - } - - // loop on all previously filtered Gen Particles and establish relations between them + set flags - unsigned int NGenSel = cands_.size(); - if (DEBUG) cout << "SELECTED PARTICLES: " << NGenSel << endl; - for (unsigned int iGen = 0; iGen < NGenSel; iGen++) - { - const reco::Candidate* genP = cands_.at(iGen); - const GenParticle* genPClone = (GenParticle*) cands_.at(iGen); - int PdgId = genP->pdgId(); - int APdgId = abs(PdgId); - - //only select some particles - pat::GenericParticle filtGenP (*genP); // to be saved in output - if (DEBUG) cout << iGen << " | id: " << genP->pdgId () << " pt: " << genP->pt() << " eta: " << genP->eta() << " | px: " << genP->px() << " , eta: " << genP->eta() << endl; - - // ------------------- general info flag on particles - filtGenP.addUserInt ("generalGenFlags", makeFlagVector (genPClone)); - - // ------------------- tau decay flags - if (APdgId == 15) - { - int decay = genhelper::GetTauDecay(genP); - filtGenP.addUserInt ("tauGenDecayMode", decay); - if (decay == 2) tauHadcandsMothers_.push_back (iGen); // for later usage for tauh - if (DEBUG) cout << " --> tau decay: " << decay << endl; - } - - // -------------------- H/Z decay mode: set final state and is is prompt - if (APdgId == 25 || APdgId == 23 || APdgId == 36) - { - genhelper::HZDecay decay = genhelper::GetHZDecay (genP); - filtGenP.addUserInt ("HZDecayMode", static_cast (decay)); - if (DEBUG) cout << " --> H/Z decay: " << decay << endl; - } - - // -------------------- W decay mode: set final state and is is prompt - if (APdgId == 24) - { - 
genhelper::WDecay decay = genhelper::GetWDecay (genP); - filtGenP.addUserInt ("WDecayMode", static_cast (decay)); - if (DEBUG) cout << " --> W decay: " << decay << endl; - } - - // -------------------- top decay mode: set final state and is is prompt - if (APdgId == 6) - { - genhelper::WDecay decay = genhelper::GetTopDecay (genP); - filtGenP.addUserInt ("TopDecayMode", static_cast (decay)); - if (DEBUG) cout << " --> Top decay: " << decay << endl; - } - - - // ------------------- mother info flag - - const reco::Candidate* MothPtr; - - // H - MothPtr = genhelper::IsFromID (genP, 25); - if (MothPtr != NULL) // save space, only add userfloats when valid - { - filtGenP.addUserInt ("HMothIndex", genhelper::GetIndexInOutput(MothPtr, cands_)); - if (DEBUG) cout << " --> fromH: 1, indexH: " << genhelper::GetIndexInOutput(MothPtr, cands_) << endl; - } - - // H MSSM - MothPtr = genhelper::IsFromID (genP, 36); - if (MothPtr != NULL) // save space, only add userfloats when valid - { - filtGenP.addUserInt ("MSSMHMothIndex", genhelper::GetIndexInOutput(MothPtr, cands_)); - if (DEBUG) cout << " --> fromH(MSSM): 1, indexH: " << genhelper::GetIndexInOutput(MothPtr, cands_) << endl; - } - - // Z - MothPtr = genhelper::IsFromID (genP, 23); - if (MothPtr != NULL) // save space, only add userfloats when valid - { - filtGenP.addUserInt ("ZMothIndex", genhelper::GetIndexInOutput(MothPtr, cands_)); - if (DEBUG) cout << " --> fromZ: 1, indexZ: " << genhelper::GetIndexInOutput(MothPtr, cands_) << endl; - } - - // top - MothPtr = genhelper::IsFromID (genP, 6); - if (MothPtr != NULL) - { - filtGenP.addUserInt ("TopMothIndex", genhelper::GetIndexInOutput(MothPtr, cands_)); - if (DEBUG) cout << " --> fromTop: 1, indexTop: " << genhelper::GetIndexInOutput(MothPtr, cands_) << endl; - } - - // W - MothPtr = genhelper::IsFromID (genP, 24); - if (MothPtr != NULL) // save space, only add userfloats when valid - { - filtGenP.addUserInt ("WMothIndex", genhelper::GetIndexInOutput(MothPtr, cands_)); - if (DEBUG) cout << " --> fromW: 1, indexW: " << genhelper::GetIndexInOutput(MothPtr, cands_) << endl; - } - - // b - MothPtr = genhelper::IsFromID (genP, 5); - if (MothPtr != NULL) - { - filtGenP.addUserInt ("bMothIndex", genhelper::GetIndexInOutput(MothPtr, cands_)); - if (DEBUG) cout << " --> fromb: 1, indexb: " << genhelper::GetIndexInOutput(MothPtr, cands_) << endl; - } - - // tau - MothPtr = genhelper::IsFromID (genP, 15); - if (MothPtr != NULL) - { - filtGenP.addUserInt ("TauMothIndex", genhelper::GetIndexInOutput(MothPtr, cands_)); - if (DEBUG) cout << " --> fromTau: 1, indexTau: " << genhelper::GetIndexInOutput(MothPtr, cands_) << endl; - } - - - if(filtGenP.hasUserInt("tauGenDecayMode") && - (filtGenP.hasUserInt("ZMothIndex") || filtGenP.hasUserInt("HMothIndex") || filtGenP.hasUserInt("MSSMHMothIndex"))){ - - TVector3 aPVGenPoint = TVector3(genPClone->vx(), genPClone->vy(), genPClone->vz()); - reco::GenParticleRefVector tauDaughters; - genhelper::GetTausDaughters(*genPClone,tauDaughters,true,false); - int detailedDecayMode = genhelper::getDetailedTauDecayMode(tauDaughters); - filtGenP.addUserInt("tauGenDetailedDecayMode", detailedDecayMode); - - reco::GenParticleRef leadChParticleRef = genhelper::GetLeadChParticle(tauDaughters); - - TLorentzVector p4LeadingChParticle(leadChParticleRef->px(), - leadChParticleRef->py(), - leadChParticleRef->pz(), - leadChParticleRef->energy()); - TVector3 tauDecayVertex(leadChParticleRef->vx(), leadChParticleRef->vy(), leadChParticleRef->vz()); - TVector3 pca = 
genhelper::ImpactParameter(aPVGenPoint, tauDecayVertex, p4LeadingChParticle); - filtGenP.addUserFloat("pca_x",pca.X()); - filtGenP.addUserFloat("pca_y",pca.Y()); - filtGenP.addUserFloat("pca_z",pca.Z()); - } - - result->push_back (filtGenP); - } - - - // finally, do the hadronic tau (leave as last step not to spoil internal ordering of mother / daughter vector) - if (DEBUG) cout << "BUILDING tauH, size: " << tauHadcandsMothers_.size() << endl; - for (unsigned int iTauH = 0; iTauH < tauHadcandsMothers_.size(); iTauH++) - { - int tauMothInd = tauHadcandsMothers_.at(iTauH); - pat::GenericParticle tauH (genhelper::GetTauHad(cands_.at(tauMothInd))); - pat::GenericParticle tauH_neutral (genhelper::GetTauHadNeutrals(cands_.at(tauMothInd))); - tauH.addUserInt ("TauMothIndex", tauMothInd); - tauH_neutral.addUserInt ("TauMothIndex", tauMothInd); - - // copy all the other flags from original tau - pat::GenericParticle& tauMothGenP = result->at(tauMothInd); - if (tauMothGenP.hasUserInt("HMothIndex") ){ - tauH.addUserInt ("HMothIndex", tauMothGenP.userInt ("HMothIndex")); - tauH_neutral.addUserInt ("HMothIndex", tauMothGenP.userInt ("HMothIndex")); - } - if (tauMothGenP.hasUserInt("MSSMHMothIndex") ){ - tauH.addUserInt ("MSSMHMothIndex", tauMothGenP.userInt ("MSSMHMothIndex")); - tauH_neutral.addUserInt ("MSSMHMothIndex", tauMothGenP.userInt ("MSSMHMothIndex")); - } - if (tauMothGenP.hasUserInt("TopMothIndex") ){ - tauH.addUserInt ("TopMothIndex", tauMothGenP.userInt ("TopMothIndex")); - tauH_neutral.addUserInt ("TopMothIndex", tauMothGenP.userInt ("TopMothIndex")); - } - if (tauMothGenP.hasUserInt("bMothIndex") ){ - tauH.addUserInt ("bMothIndex", tauMothGenP.userInt ("bMothIndex")); - tauH_neutral.addUserInt ("bMothIndex", tauMothGenP.userInt ("bMothIndex")); - } - if (tauMothGenP.hasUserInt("WMothIndex") ){ - tauH.addUserInt ("WMothIndex", tauMothGenP.userInt ("WMothIndex")); - tauH_neutral.addUserInt ("WMothIndex", tauMothGenP.userInt ("WMothIndex")); - } - if (tauMothGenP.hasUserInt("ZMothIndex") ){ - tauH.addUserInt ("ZMothIndex", tauMothGenP.userInt ("ZMothIndex")); - tauH_neutral.addUserInt ("ZMothIndex", tauMothGenP.userInt ("ZMothIndex")); - } - - - - // many flags change of meaning w.r.t. mother tau, put everything to 0 (can be changed in future) - int tauhFlags = 0; - tauH.addUserInt ("generalGenFlags", tauhFlags); // remember! TauH inherits ALL the flags from - tauH_neutral.addUserInt ("generalGenFlags", tauhFlags); // remember! 
TauH inherits ALL the flags from - - if (DEBUG){ - cout << " ++ " << iTauH << " id: " << tauH.pdgId() << " | pt: " << tauH.pt() << " | eta: " << tauH.eta() << endl; - cout << " ++ " << iTauH << " id: " << tauH_neutral.pdgId() << " | pt: " << tauH_neutral.pt() << " | eta: " << tauH_neutral.eta() << endl; - } - result->push_back (tauH); - result->push_back (tauH_neutral); - } - - - iEvent.put(std::move(result)); - - -} - -// set of requirement(s) defining which particle must be saved -bool GenFiller::IsInteresting (const GenParticle& p) -{ - int APdgId = abs(p.pdgId()); - - bool IsLast = genhelper::IsLastCopy(p); - bool GoodPdgId = (APdgId == 25 || APdgId == 36 || APdgId == 23 || APdgId == 24 ||// bosons - APdgId == 1000022 || APdgId == 1000023 || APdgId == 1000025 ||// SUSY particles - APdgId == 6 || // quarks - APdgId == 11 || APdgId == 12 || APdgId == 13 || APdgId == 14 || APdgId == 15 || APdgId == 16); // leptons - - if(isVBFParton(p)) return true ; - - if (IsLast && GoodPdgId) return true; - - // case of b quarks, just save first one (too many showering products) - bool IsFirst = genhelper::IsFirstCopy(p, true); - bool GoodFirstPdg = (APdgId == 5 || APdgId == 6 || APdgId == 11 || APdgId == 13 || APdgId == 15 || APdgId==25); - if (storeLightFlavAndGlu_) // also light flavors and quarks - GoodFirstPdg = (GoodFirstPdg || APdgId == 1 || APdgId == 2 || APdgId == 3 || APdgId == 4 || APdgId == 21); - - if (GoodFirstPdg && IsFirst) return true; - - - return false; -} - - -int GenFiller::makeFlagVector (const GenParticle* p) -{ - int flags = 0; - const GenStatusFlags& fl = p->statusFlags(); - - if (fl.isPrompt()) flags |= (1 << 0); - if (fl.isDecayedLeptonHadron()) flags |= (1 << 1); - if (fl.isTauDecayProduct()) flags |= (1 << 2); - if (fl.isPromptTauDecayProduct()) flags |= (1 << 3); - if (fl.isDirectTauDecayProduct()) flags |= (1 << 4); - if (fl.isDirectPromptTauDecayProduct()) flags |= (1 << 5); - if (fl.isDirectHadronDecayProduct()) flags |= (1 << 6); - if (fl.isHardProcess()) flags |= (1 << 7); - if (fl.fromHardProcess()) flags |= (1 << 8); - if (fl.isHardProcessTauDecayProduct()) flags |= (1 << 9); - if (fl.isDirectHardProcessTauDecayProduct()) flags |= (1 << 10); - if (fl.fromHardProcessBeforeFSR()) flags |= (1 << 11); - if (fl.isFirstCopy()) flags |= (1 << 12); - if (fl.isLastCopy()) flags |= (1 << 13); - if (fl.isLastCopyBeforeFSR()) flags |= (1 << 14); - if (isVBFParton(*p)) flags |= (1 << 15); - return flags; -} - -bool GenFiller::isVBFParton(const GenParticle& p) -{ - int APdgId = abs(p.pdgId()); - bool IsVBFPartonPdgId = (APdgId == 1 || APdgId == 2 || APdgId == 3 || APdgId == 4 || APdgId == 5 || APdgId == 21);// quark or gluon - bool FoundHiggs = false; - - if(IsVBFPartonPdgId) - { - for(unsigned int iMother = 0 ; iMother < p.numberOfMothers() ; ++iMother) - { - const reco::Candidate* Mother = p.mother(iMother); - for(unsigned int iDaughter = 0 ; iDaughter < Mother->numberOfDaughters() ; ++iDaughter) - { - const reco::Candidate* Daughter = Mother->daughter(iDaughter); - if(Daughter->pdgId()==25) FoundHiggs = true ; - if(FoundHiggs) break; - } - } - } - return FoundHiggs ; -} - - -#include -DEFINE_FWK_MODULE(GenFiller); diff --git a/TauTagAndProbe/plugins/Ntuplizer.cc b/TauTagAndProbe/plugins/Ntuplizer.cc deleted file mode 100644 index 21ba953d98d..00000000000 --- a/TauTagAndProbe/plugins/Ntuplizer.cc +++ /dev/null @@ -1,1147 +0,0 @@ -#ifndef NTUPLIZER_H -#define NTUPLIZER_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - - 
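For reference, the GenFiller::makeFlagVector routine above packs the reco::GenStatusFlags bits (plus the VBF-parton tag) into the single "generalGenFlags" userInt, with isPrompt in bit 0, isDirectPromptTauDecayProduct in bit 5 and isVBFParton in bit 15; the gen-matching further down reads exactly those bits. A minimal sketch of how a consumer could decode it (the helper name hasGenFlag is illustrative, not part of the package):

// Illustrative only: decode one bit of the packed "generalGenFlags" userInt.
inline bool hasGenFlag(int flags, unsigned bit) { return (flags >> bit) & 1; }

// Example usage on a stored pat::GenericParticle gp:
//   bool isPrompt          = hasGenFlag(gp.userInt("generalGenFlags"), 0);   // statusFlags().isPrompt()
//   bool isDirectPromptTau = hasGenFlag(gp.userInt("generalGenFlags"), 5);   // isDirectPromptTauDecayProduct()
//   bool isVBFParton       = hasGenFlag(gp.userInt("generalGenFlags"), 15);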
-#include "FWCore/Framework/interface/EDAnalyzer.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include -#include -#include "FWCore/ServiceRegistry/interface/Service.h" -#include "FWCore/Common/interface/TriggerNames.h" -#include "HLTrigger/HLTcore/interface/HLTConfigProvider.h" -#include "HLTrigger/HLTcore/interface/HLTPrescaleProvider.h" -#include "DataFormats/L1Trigger/interface/Tau.h" -#include "DataFormats/VertexReco/interface/Vertex.h" -#include "SimDataFormats/PileupSummaryInfo/interface/PileupSummaryInfo.h" -#include "DataFormats/JetReco/interface/CaloJet.h" -#include "DataFormats/BTauReco/interface/JetTag.h" -#include -#include "DataFormats/Common/interface/TriggerResults.h" - -#include "SimDataFormats/GeneratorProducts/interface/GenEventInfoProduct.h" - -#include "tParameterSet.h" - -#include "CommonTools/UtilAlgos/interface/TFileService.h" - - - -//Set this variable to decide the number of triggers that you want to check simultaneously -#define NUMBER_OF_MAXIMUM_TRIGGERS 64 - - -/* -██████ ███████ ██████ ██ █████ ██████ █████ ████████ ██ ██████ ███ ██ -██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ -██ ██ █████ ██ ██ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ -██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ -██████ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -class Ntuplizer : public edm::EDAnalyzer { - public: - /// Constructor - explicit Ntuplizer(const edm::ParameterSet&); - /// Destructor - virtual ~Ntuplizer(); - - private: - //----edm control--- - virtual void beginJob() ; - virtual void beginRun(edm::Run const&, edm::EventSetup const&); - virtual void analyze(const edm::Event&, const edm::EventSetup&); - virtual void endJob(); - virtual void endRun(edm::Run const&, edm::EventSetup const&); - void Initialize(); - bool hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor); - int GenIndex(const pat::TauRef& tau, const edm::View* genparts); - float ComputeMT (math::XYZTLorentzVector visP4, const pat::MET& met); - - bool _isMC; - - TTree *_tree; - TTree *_triggerNamesTree; - std::string _treeName; - // ------------------------------------- - // variables to be filled in output tree - ULong64_t _indexevents; - Int_t _runNumber; - Int_t _lumi; - Int_t _PS_column; - - float _MC_weight; - - unsigned long _tauTriggerBits; - float _tauPt; - float _tauEta; - float _tauPhi; - int _tauDM; - float _tauMass; - float _mT; - float _mVis; - int _tau_genindex; - bool _decayModeFinding; - bool _decayModeFindingNewDMs; - - bool _byLooseCombinedIsolationDeltaBetaCorr3Hits; - bool _byMediumCombinedIsolationDeltaBetaCorr3Hits; - bool _byTightCombinedIsolationDeltaBetaCorr3Hits; - bool _byVLooseIsolationMVArun2v1DBoldDMwLT; - bool _byLooseIsolationMVArun2v1DBoldDMwLT; - bool _byMediumIsolationMVArun2v1DBoldDMwLT; - bool _byTightIsolationMVArun2v1DBoldDMwLT; - bool _byVTightIsolationMVArun2v1DBoldDMwLT; - bool _byVLooseIsolationMVArun2v1DBnewDMwLT; - bool _byLooseIsolationMVArun2v1DBnewDMwLT; - bool _byMediumIsolationMVArun2v1DBnewDMwLT; - bool _byTightIsolationMVArun2v1DBnewDMwLT; - bool _byVTightIsolationMVArun2v1DBnewDMwLT; - bool _byLooseIsolationMVArun2v1DBdR03oldDMwLT; - bool _byMediumIsolationMVArun2v1DBdR03oldDMwLT; - bool _byTightIsolationMVArun2v1DBdR03oldDMwLT; - bool _byVTightIsolationMVArun2v1DBdR03oldDMwLT; - - // 2017v1 training for Fall 17 - float _byIsolationMVArun2017v1DBoldDMwLTraw2017; - bool _byVVLooseIsolationMVArun2017v1DBoldDMwLT2017; - 
bool _byVLooseIsolationMVArun2017v1DBoldDMwLT2017; - bool _byLooseIsolationMVArun2017v1DBoldDMwLT2017; - bool _byMediumIsolationMVArun2017v1DBoldDMwLT2017; - bool _byTightIsolationMVArun2017v1DBoldDMwLT2017; - bool _byVTightIsolationMVArun2017v1DBoldDMwLT2017; - bool _byVVTightIsolationMVArun2017v1DBoldDMwLT2017; - - // 2017v2 training for Fall 17 - float _byIsolationMVArun2017v2DBoldDMwLTraw2017; - bool _byVVLooseIsolationMVArun2017v2DBoldDMwLT2017; - bool _byVLooseIsolationMVArun2017v2DBoldDMwLT2017; - bool _byLooseIsolationMVArun2017v2DBoldDMwLT2017; - bool _byMediumIsolationMVArun2017v2DBoldDMwLT2017; - bool _byTightIsolationMVArun2017v2DBoldDMwLT2017; - bool _byVTightIsolationMVArun2017v2DBoldDMwLT2017; - bool _byVVTightIsolationMVArun2017v2DBoldDMwLT2017; - - // dR0p32017v2 training for Fall 17 - float _byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017; - bool _byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017; - bool _byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017; - bool _byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017; - bool _byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017; - bool _byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017; - bool _byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017; - bool _byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017; - - // newDM2017v2 training for Fall 17 - float _byIsolationMVArun2017v2DBnewDMwLTraw2017; - bool _byVVLooseIsolationMVArun2017v2DBnewDMwLT2017; - bool _byVLooseIsolationMVArun2017v2DBnewDMwLT2017; - bool _byLooseIsolationMVArun2017v2DBnewDMwLT2017; - bool _byMediumIsolationMVArun2017v2DBnewDMwLT2017; - bool _byTightIsolationMVArun2017v2DBnewDMwLT2017; - bool _byVTightIsolationMVArun2017v2DBnewDMwLT2017; - bool _byVVTightIsolationMVArun2017v2DBnewDMwLT2017; - - - bool _againstMuonLoose3; - bool _againstMuonTight3; - bool _againstElectronVLooseMVA6; - bool _againstElectronLooseMVA6; - bool _againstElectronMediumMVA6; - bool _againstElectronTightMVA6; - bool _againstElectronVTightMVA6; - - vector _hltPt; - vector _hltEta; - vector _hltPhi; - vector _hltMass; - float _hltL2CaloJetPt; - float _hltL2CaloJetEta; - float _hltL2CaloJetPhi; - float _hltL2CaloJetIso; - float _hltL2CaloJetIsoPixPt; - float _hltL2CaloJetIsoPixEta; - float _hltL2CaloJetIsoPixPhi; - float _hltPFTauTrackPt; - float _hltPFTauTrackEta; - float _hltPFTauTrackPhi; - float _hltPFTauTrackRegPt; - float _hltPFTauTrackRegEta; - float _hltPFTauTrackRegPhi; - float _hltPFTau35TrackPt1RegPt; - float _hltPFTau35TrackPt1RegEta; - float _hltPFTau35TrackPt1RegPhi; - float _hltHPSPFTauTrackPt; - float _hltHPSPFTauTrackEta; - float _hltHPSPFTauTrackPhi; - float _hltHPSPFTauTrackRegPt; - float _hltHPSPFTauTrackRegEta; - float _hltHPSPFTauTrackRegPhi; - - int _l1tQual; - float _l1tPt; - float _l1tEta; - float _l1tPhi; - int _l1tIso; - int _l1tEmuQual; - float _l1tEmuPt; - float _l1tEmuEta; - float _l1tEmuPhi; - int _l1tEmuIso; - int _l1tEmuNTT; - int _l1tEmuHasEM; - int _l1tEmuIsMerged; - int _l1tEmuTowerIEta; - int _l1tEmuTowerIPhi; - int _l1tEmuRawEt; - int _l1tEmuIsoEt; - Bool_t _hasTriggerMuonType; - Bool_t _hasTriggerTauType; - Bool_t _isMatched; - Bool_t _isOS; - int _foundJet; - float _muonPt; - float _muonEta; - float _muonPhi; - float _MET; - int _Nvtx; - float _nTruePU; - - edm::EDGetTokenT _genTag; - edm::EDGetTokenT > _genPartTag; - - edm::EDGetTokenT _muonsTag; - edm::EDGetTokenT _tauTag; - edm::EDGetTokenT _metTag; - edm::EDGetTokenT _triggerObjects; - edm::EDGetTokenT _triggerBits; - edm::EDGetTokenT _L1TauTag ; - edm::EDGetTokenT _L1EmuTauTag ; - edm::EDGetTokenT> _VtxTag; - 
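The ntuplizer records one match decision per configured HLT path in the fixed-size std::bitset declared just below (_tauTriggerBitSet, NUMBER_OF_MAXIMUM_TRIGGERS = 64 wide) and later exports it to the tauTriggerBits branch via to_ulong(). A self-contained sketch of that packing, assuming the same 64-trigger limit:

#include <bitset>
#include <vector>

constexpr std::size_t kMaxTriggers = 64;  // mirrors NUMBER_OF_MAXIMUM_TRIGGERS

// Illustrative only: set bit i when the probe tau matched the filters of path i.
unsigned long packTriggerBits(const std::vector<bool>& matchedPerPath)
{
    std::bitset<kMaxTriggers> bits;
    for (std::size_t i = 0; i < matchedPerPath.size() && i < kMaxTriggers; ++i)
        bits[i] = matchedPerPath[i];
    return bits.to_ulong();  // same packing as the tauTriggerBits branch
}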
edm::EDGetTokenT> _puTag; - edm::EDGetTokenT _hltL2CaloJet_ForIsoPix_Tag; - edm::EDGetTokenT _hltL2CaloJet_ForIsoPix_IsoTag; - - //!Contains the parameters - tVParameterSet _parameters; - tVParameterSet _parameters_Tag; - - edm::InputTag _processName; - //! Maximum - std::bitset _tauTriggerBitSet; - - - - HLTConfigProvider _hltConfig; - HLTPrescaleProvider* _hltPrescale; - - -}; - -/* -██ ███ ███ ██████ ██ ███████ ███ ███ ███████ ███ ██ ████████ █████ ████████ ██ ██████ ███ ██ -██ ████ ████ ██ ██ ██ ██ ████ ████ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ -██ ██ ████ ██ ██████ ██ █████ ██ ████ ██ █████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██ -██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ -██ ██ ██ ██ ███████ ███████ ██ ██ ███████ ██ ████ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -// ----Constructor and Destructor ----- -Ntuplizer::Ntuplizer(const edm::ParameterSet& iConfig) : -_genTag (consumes (iConfig.getParameter("genCollection"))), -_genPartTag (consumes> (iConfig.getParameter("genPartCollection"))), -_muonsTag (consumes (iConfig.getParameter("muons"))), -_tauTag (consumes (iConfig.getParameter("taus"))), -_metTag (consumes (iConfig.getParameter("met"))), -_triggerObjects (consumes (iConfig.getParameter("triggerSet"))), -_triggerBits (consumes (iConfig.getParameter("triggerResultsLabel"))), -_L1TauTag (consumes (iConfig.getParameter("L1Tau"))), -_L1EmuTauTag (consumes (iConfig.getParameter("L1EmuTau"))), -_VtxTag (consumes> (iConfig.getParameter("Vertexes"))), -_puTag (consumes> (iConfig.getParameter("puInfo"))), -_hltL2CaloJet_ForIsoPix_Tag(consumes (iConfig.getParameter("L2CaloJet_ForIsoPix_Collection"))), -_hltL2CaloJet_ForIsoPix_IsoTag(consumes (iConfig.getParameter("L2CaloJet_ForIsoPix_IsoCollection"))) -{ - - _isMC = iConfig.getParameter("isMC"); - - - _hltPrescale = new HLTPrescaleProvider(iConfig,consumesCollector(),*this); - - _treeName = iConfig.getParameter("treeName"); - _processName = iConfig.getParameter("triggerResultsLabel"); - - TString triggerName; - edm::Service fs; - _triggerNamesTree = fs -> make("triggerNames", "triggerNames"); - _triggerNamesTree -> Branch("triggerNames",&triggerName); - - //Building the trigger arrays - const std::vector& HLTList = iConfig.getParameter > ("triggerList"); - for (const edm::ParameterSet& parameterSet : HLTList) { - tParameterSet pSet; - pSet.hltPath = parameterSet.getParameter("HLT"); - triggerName = pSet.hltPath; - pSet.hltFilters1 = parameterSet.getParameter >("path1"); - pSet.hltFilters2 = parameterSet.getParameter >("path2"); - pSet.leg1 = parameterSet.getParameter("leg1"); - pSet.leg2 = parameterSet.getParameter("leg2"); - _parameters.push_back(pSet); - - _triggerNamesTree -> Fill(); - } - - const std::vector& HLTList_Tag = iConfig.getParameter > ("triggerList_tag"); - for (const edm::ParameterSet& parameterSet : HLTList_Tag) { - tParameterSet pSet; - pSet.hltPath = parameterSet.getParameter("HLT"); - pSet.hltFilters1 = parameterSet.getParameter >("path1"); - pSet.hltFilters2 = parameterSet.getParameter >("path2"); - pSet.leg1 = parameterSet.getParameter("leg1"); - pSet.leg2 = parameterSet.getParameter("leg2"); - _parameters_Tag.push_back(pSet); - } - - - - this -> Initialize(); - return; -} - -Ntuplizer::~Ntuplizer() -{ - delete _hltPrescale; -} - -void Ntuplizer::beginRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - Bool_t changedConfig = false; - - if(!_hltConfig.init(iRun, iSetup, _processName.process(), changedConfig)){ - edm::LogError("HLTMatchingFilter") << "Initialization of 
HLTConfigProvider failed!!"; - return; - } - - if(!_hltPrescale->init(iRun, iSetup, _processName.process(), changedConfig)){ - edm::LogError("HLTMatchingFilter") << "Initialization of HLTPrescaleProvider failed!!"; - return; - } - - const edm::TriggerNames::Strings& triggerNames = _hltConfig.triggerNames(); - std::cout << " ===== LOOKING FOR THE PATH INDEXES =====" << std::endl; - for (tParameterSet& parameter : _parameters){ - const std::string& hltPath = parameter.hltPath; - bool found = false; - for(unsigned int j=0; j < triggerNames.size(); j++) - { - //std::cout << triggerNames[j] << std::endl; - if (triggerNames[j].find(hltPath) != std::string::npos) { - found = true; - parameter.hltPathIndex = j; - - std::cout << "### FOUND AT INDEX #" << j << " --> " << triggerNames[j] << std::endl; - } - } - if (!found) parameter.hltPathIndex = -1; - } - - - - std::cout << " ===== LOOKING FOR THE PATH INDEXES FOR TAG=====" << std::endl; - for (tParameterSet& parameter : _parameters_Tag){ - const std::string& hltPath = parameter.hltPath; - bool found = false; - for(unsigned int j=0; j < triggerNames.size(); j++) - { - // std::cout << triggerNames[j] << std::endl; - if (triggerNames[j].find(hltPath) != std::string::npos) { - found = true; - parameter.hltPathIndex = j; - - std::cout << "### FOUND AT INDEX #" << j << " --> " << triggerNames[j] << std::endl; - } - } - if (!found) parameter.hltPathIndex = -1; - } - -} - -void Ntuplizer::Initialize() { - _indexevents = 0; - _runNumber = 0; - _lumi = 0; - _PS_column = -1; - - _MC_weight = 1; - - _tauPt = -1.; - _tauEta = -1.; - _tauPhi = -1.; - _tauDM = -1; - _tauMass = -1; - _mT = -1.; - _mVis = -1.; - _tau_genindex = -1; - - _decayModeFinding = 0; - _decayModeFindingNewDMs = 0; - - _byLooseCombinedIsolationDeltaBetaCorr3Hits = 0; - _byMediumCombinedIsolationDeltaBetaCorr3Hits = 0; - _byTightCombinedIsolationDeltaBetaCorr3Hits = 0; - _byVLooseIsolationMVArun2v1DBoldDMwLT = 0; - _byLooseIsolationMVArun2v1DBoldDMwLT = 0; - _byMediumIsolationMVArun2v1DBoldDMwLT = 0; - _byTightIsolationMVArun2v1DBoldDMwLT = 0; - _byVTightIsolationMVArun2v1DBoldDMwLT = 0; - _byVLooseIsolationMVArun2v1DBnewDMwLT = 0; - _byLooseIsolationMVArun2v1DBnewDMwLT = 0; - _byMediumIsolationMVArun2v1DBnewDMwLT = 0; - _byTightIsolationMVArun2v1DBnewDMwLT = 0; - _byVTightIsolationMVArun2v1DBnewDMwLT = 0; - _byLooseIsolationMVArun2v1DBdR03oldDMwLT = 0; - _byMediumIsolationMVArun2v1DBdR03oldDMwLT = 0; - _byTightIsolationMVArun2v1DBdR03oldDMwLT = 0; - _byVTightIsolationMVArun2v1DBdR03oldDMwLT = 0; - - // 2017v1 training for Fall 17 - _byIsolationMVArun2017v1DBoldDMwLTraw2017 = 0; - _byVVLooseIsolationMVArun2017v1DBoldDMwLT2017 = 0; - _byVLooseIsolationMVArun2017v1DBoldDMwLT2017 = 0; - _byLooseIsolationMVArun2017v1DBoldDMwLT2017 = 0; - _byMediumIsolationMVArun2017v1DBoldDMwLT2017 = 0; - _byTightIsolationMVArun2017v1DBoldDMwLT2017 = 0; - _byVTightIsolationMVArun2017v1DBoldDMwLT2017 = 0; - _byVVTightIsolationMVArun2017v1DBoldDMwLT2017 = 0; - - // 2017v2 training for Fall 17 - _byIsolationMVArun2017v2DBoldDMwLTraw2017 = 0; - _byVVLooseIsolationMVArun2017v2DBoldDMwLT2017 = 0; - _byVLooseIsolationMVArun2017v2DBoldDMwLT2017 = 0; - _byLooseIsolationMVArun2017v2DBoldDMwLT2017 = 0; - _byMediumIsolationMVArun2017v2DBoldDMwLT2017 = 0; - _byTightIsolationMVArun2017v2DBoldDMwLT2017 = 0; - _byVTightIsolationMVArun2017v2DBoldDMwLT2017 = 0; - _byVVTightIsolationMVArun2017v2DBoldDMwLT2017 = 0; - - // dR0p32017v2 training for Fall 17 - _byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017 = 0; - 
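beginRun above resolves each configured path to an index by substring search over the menu's trigger names, so an un-versioned prefix from the python configuration still matches the versioned name actually present in the menu; paths that are not found get index -1 and are skipped in the matching loop. A standalone sketch of that lookup (function and argument names are illustrative):

#include <string>
#include <vector>

// Illustrative only: find the first trigger name containing the configured path prefix.
int findPathIndex(const std::vector<std::string>& triggerNames, const std::string& hltPath)
{
    for (std::size_t j = 0; j < triggerNames.size(); ++j)
        if (triggerNames[j].find(hltPath) != std::string::npos)
            return static_cast<int>(j);
    return -1;  // same convention as parameter.hltPathIndex when the path is absent
}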
_byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 0; - _byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 0; - _byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 0; - _byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 0; - _byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 0; - _byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 0; - _byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 0; - - // newDM2017v2 training for Fall 17 - _byIsolationMVArun2017v2DBnewDMwLTraw2017 = 0; - _byVVLooseIsolationMVArun2017v2DBnewDMwLT2017 = 0; - _byVLooseIsolationMVArun2017v2DBnewDMwLT2017 = 0; - _byLooseIsolationMVArun2017v2DBnewDMwLT2017 = 0; - _byMediumIsolationMVArun2017v2DBnewDMwLT2017 = 0; - _byTightIsolationMVArun2017v2DBnewDMwLT2017 = 0; - _byVTightIsolationMVArun2017v2DBnewDMwLT2017 = 0; - _byVVTightIsolationMVArun2017v2DBnewDMwLT2017 = 0; - - _againstMuonLoose3 = 0; - _againstMuonTight3 = 0; - _againstElectronVLooseMVA6 = 0; - _againstElectronLooseMVA6 = 0; - _againstElectronMediumMVA6 = 0; - _againstElectronTightMVA6 = 0; - _againstElectronVTightMVA6 = 0; - - _muonPt = -1.; - _muonEta = -1.; - _muonPhi = -1.; - _MET = -1.; - _isMatched = false; - - _hltPt.assign(NUMBER_OF_MAXIMUM_TRIGGERS,-1); - _hltEta.assign(NUMBER_OF_MAXIMUM_TRIGGERS,666); - _hltPhi.assign(NUMBER_OF_MAXIMUM_TRIGGERS,666); - _hltMass.assign(NUMBER_OF_MAXIMUM_TRIGGERS,666); - _hltL2CaloJetPt = -1; - _hltL2CaloJetEta = 666; - _hltL2CaloJetPhi = 666; - _hltL2CaloJetIso = -1; - _hltL2CaloJetIsoPixPt = -1; - _hltL2CaloJetIsoPixEta = 666; - _hltL2CaloJetIsoPixPhi = 666; - _hltPFTauTrackPt = -1; - _hltPFTauTrackEta = 666; - _hltPFTauTrackPhi = 666; - _hltPFTauTrackRegPt = -1; - _hltPFTauTrackRegEta = 666; - _hltPFTauTrackRegPhi = 666; - _hltPFTau35TrackPt1RegPt = -1; - _hltPFTau35TrackPt1RegEta = 666; - _hltPFTau35TrackPt1RegPhi = 666; - - _hltHPSPFTauTrackPt = -1; - _hltHPSPFTauTrackEta = 666; - _hltHPSPFTauTrackPhi = 666; - _hltHPSPFTauTrackRegPt = -1; - _hltHPSPFTauTrackRegEta = 666; - _hltHPSPFTauTrackRegPhi = 666; - - _l1tPt = -1; - _l1tEta = 666; - _l1tPhi = 666; - _l1tQual = -1; - _l1tIso = -1; - _l1tEmuPt = -1; - _l1tEmuEta = 666; - _l1tEmuPhi = 666; - _l1tEmuQual = -1; - _l1tEmuIso = -1; - _l1tEmuNTT = -1; - _l1tEmuHasEM = -1; - _l1tEmuIsMerged = -1; - _l1tEmuTowerIEta = -1; - _l1tEmuTowerIPhi = -1; - _l1tEmuRawEt = -1; - _l1tEmuIsoEt = -1; - _foundJet = 0; -} - - -void Ntuplizer::beginJob() -{ - edm::Service fs; - _tree = fs -> make(this -> _treeName.c_str(), this -> _treeName.c_str()); - - //Branches - _tree -> Branch("EventNumber",&_indexevents,"EventNumber/l"); - _tree -> Branch("RunNumber",&_runNumber,"RunNumber/I"); - _tree -> Branch("lumi",&_lumi,"lumi/I"); - _tree -> Branch("PS_column",&_PS_column,"PS_column/I"); - - _tree -> Branch("MC_weight",&_MC_weight,"MC_weight/F"); - - _tree -> Branch("tauTriggerBits", &_tauTriggerBits, "tauTriggerBits/l"); - _tree -> Branch("tauPt", &_tauPt, "tauPt/F"); - _tree -> Branch("tauEta", &_tauEta, "tauEta/F"); - _tree -> Branch("tauPhi", &_tauPhi, "tauPhi/F"); - _tree -> Branch("tauDM", &_tauDM, "tauDM/I"); - _tree -> Branch("tauMass", &_tauMass, "tauMass/F"); - _tree -> Branch("mT", &_mT, "mT/F"); - _tree -> Branch("mVis", &_mVis, "mVis/F"); - _tree -> Branch("tau_genindex", &_tau_genindex, "tau_genindex/I"); - _tree -> Branch("decayModeFinding", &_decayModeFinding, "decayModeFinding/O"); - _tree -> Branch("decayModeFindingNewDMs", &_decayModeFindingNewDMs, "decayModeFindingNewDMs/O"); - - _tree -> Branch("byLooseCombinedIsolationDeltaBetaCorr3Hits", 
&_byLooseCombinedIsolationDeltaBetaCorr3Hits, "byLooseCombinedIsolationDeltaBetaCorr3Hits/O"); - _tree -> Branch("byMediumCombinedIsolationDeltaBetaCorr3Hits", &_byMediumCombinedIsolationDeltaBetaCorr3Hits, "byMediumCombinedIsolationDeltaBetaCorr3Hits/O"); - _tree -> Branch("byTightCombinedIsolationDeltaBetaCorr3Hits", &_byTightCombinedIsolationDeltaBetaCorr3Hits, "byTightCombinedIsolationDeltaBetaCorr3Hits/O"); - _tree -> Branch("byVLooseIsolationMVArun2v1DBoldDMwLT", &_byVLooseIsolationMVArun2v1DBoldDMwLT, "byVLooseIsolationMVArun2v1DBoldDMwLT/O"); - _tree -> Branch("byLooseIsolationMVArun2v1DBoldDMwLT", &_byLooseIsolationMVArun2v1DBoldDMwLT, "byLooseIsolationMVArun2v1DBoldDMwLT/O"); - _tree -> Branch("byMediumIsolationMVArun2v1DBoldDMwLT", &_byMediumIsolationMVArun2v1DBoldDMwLT, "byMediumIsolationMVArun2v1DBoldDMwLT/O"); - _tree -> Branch("byTightIsolationMVArun2v1DBoldDMwLT", &_byTightIsolationMVArun2v1DBoldDMwLT, "byTightIsolationMVArun2v1DBoldDMwLT/O"); - _tree -> Branch("byVTightIsolationMVArun2v1DBoldDMwLT", &_byVTightIsolationMVArun2v1DBoldDMwLT, "byVTightIsolationMVArun2v1DBoldDMwLT/O"); - _tree -> Branch("byVLooseIsolationMVArun2v1DBnewDMwLT", &_byVLooseIsolationMVArun2v1DBnewDMwLT, "byVLooseIsolationMVArun2v1DBnewDMwLT/O"); - _tree -> Branch("byLooseIsolationMVArun2v1DBnewDMwLT", &_byLooseIsolationMVArun2v1DBnewDMwLT, "byLooseIsolationMVArun2v1DBnewDMwLT/O"); - _tree -> Branch("byMediumIsolationMVArun2v1DBnewDMwLT", &_byMediumIsolationMVArun2v1DBnewDMwLT, "byMediumIsolationMVArun2v1DBnewDMwLT/O"); - _tree -> Branch("byTightIsolationMVArun2v1DBnewDMwLT", &_byTightIsolationMVArun2v1DBnewDMwLT, "byTightIsolationMVArun2v1DBnewDMwLT/O"); - _tree -> Branch("byVTightIsolationMVArun2v1DBnewDMwLT", &_byVTightIsolationMVArun2v1DBnewDMwLT, "byVTightIsolationMVArun2v1DBnewDMwLT/O"); - _tree -> Branch("byLooseIsolationMVArun2v1DBdR03oldDMwLT", &_byLooseIsolationMVArun2v1DBdR03oldDMwLT, "byLooseIsolationMVArun2v1DBdR03oldDMwLT/O"); - _tree -> Branch("byMediumIsolationMVArun2v1DBdR03oldDMwLT", &_byMediumIsolationMVArun2v1DBdR03oldDMwLT, "byMediumIsolationMVArun2v1DBdR03oldDMwLT/O"); - _tree -> Branch("byTightIsolationMVArun2v1DBdR03oldDMwLT", &_byTightIsolationMVArun2v1DBdR03oldDMwLT, "byTightIsolationMVArun2v1DBdR03oldDMwLT/O"); - _tree -> Branch("byVTightIsolationMVArun2v1DBdR03oldDMwLT", &_byVTightIsolationMVArun2v1DBdR03oldDMwLT, "byVTightIsolationMVArun2v1DBdR03oldDMwLT/O"); - - // 2017v1 training for Fall 17 - _tree -> Branch("byIsolationMVArun2017v1DBoldDMwLTraw2017", &_byIsolationMVArun2017v1DBoldDMwLTraw2017, "byIsolationMVArun2017v1DBoldDMwLTraw2017/F"); - _tree -> Branch("byVVLooseIsolationMVArun2017v1DBoldDMwLT2017", &_byVVLooseIsolationMVArun2017v1DBoldDMwLT2017, "byVVLooseIsolationMVArun2017v1DBoldDMwLT2017/O"); - _tree -> Branch("byVLooseIsolationMVArun2017v1DBoldDMwLT2017", &_byVLooseIsolationMVArun2017v1DBoldDMwLT2017, "byVLooseIsolationMVArun2017v1DBoldDMwLT2017/O"); - _tree -> Branch("byLooseIsolationMVArun2017v1DBoldDMwLT2017", &_byLooseIsolationMVArun2017v1DBoldDMwLT2017, "byLooseIsolationMVArun2017v1DBoldDMwLT2017/O"); - _tree -> Branch("byMediumIsolationMVArun2017v1DBoldDMwLT2017", &_byMediumIsolationMVArun2017v1DBoldDMwLT2017, "byMediumIsolationMVArun2017v1DBoldDMwLT2017/O"); - _tree -> Branch("byTightIsolationMVArun2017v1DBoldDMwLT2017", &_byTightIsolationMVArun2017v1DBoldDMwLT2017, "byTightIsolationMVArun2017v1DBoldDMwLT2017/O"); - _tree -> Branch("byVTightIsolationMVArun2017v1DBoldDMwLT2017", &_byVTightIsolationMVArun2017v1DBoldDMwLT2017, 
"byVTightIsolationMVArun2017v1DBoldDMwLT2017/O"); - _tree -> Branch("byVVTightIsolationMVArun2017v1DBoldDMwLT2017", &_byVVTightIsolationMVArun2017v1DBoldDMwLT2017, "byVVTightIsolationMVArun2017v1DBoldDMwLT2017/O"); - - // 2017v2 training for Fall 17 - _tree -> Branch("byIsolationMVArun2017v2DBoldDMwLTraw2017", &_byIsolationMVArun2017v2DBoldDMwLTraw2017, "byIsolationMVArun2017v2DBoldDMwLTraw2017/F"); - _tree -> Branch("byVVLooseIsolationMVArun2017v2DBoldDMwLT2017", &_byVVLooseIsolationMVArun2017v2DBoldDMwLT2017, "byVVLooseIsolationMVArun2017v2DBoldDMwLT2017/O"); - _tree -> Branch("byVLooseIsolationMVArun2017v2DBoldDMwLT2017", &_byVLooseIsolationMVArun2017v2DBoldDMwLT2017, "byVLooseIsolationMVArun2017v2DBoldDMwLT2017/O"); - _tree -> Branch("byLooseIsolationMVArun2017v2DBoldDMwLT2017", &_byLooseIsolationMVArun2017v2DBoldDMwLT2017, "byLooseIsolationMVArun2017v2DBoldDMwLT2017/O"); - _tree -> Branch("byMediumIsolationMVArun2017v2DBoldDMwLT2017", &_byMediumIsolationMVArun2017v2DBoldDMwLT2017, "byMediumIsolationMVArun2017v2DBoldDMwLT2017/O"); - _tree -> Branch("byTightIsolationMVArun2017v2DBoldDMwLT2017", &_byTightIsolationMVArun2017v2DBoldDMwLT2017, "byTightIsolationMVArun2017v2DBoldDMwLT2017/O"); - _tree -> Branch("byVTightIsolationMVArun2017v2DBoldDMwLT2017", &_byVTightIsolationMVArun2017v2DBoldDMwLT2017, "byVTightIsolationMVArun2017v2DBoldDMwLT2017/O"); - _tree -> Branch("byVVTightIsolationMVArun2017v2DBoldDMwLT2017", &_byVVTightIsolationMVArun2017v2DBoldDMwLT2017, "byVVTightIsolationMVArun2017v2DBoldDMwLT2017/O"); - - // dR0p32017v2 training for Fall 17 - _tree -> Branch("byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017", &_byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017, "byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017/F"); - _tree -> Branch("byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017", &_byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017/O"); - _tree -> Branch("byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017", &_byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017/O"); - _tree -> Branch("byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017", &_byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017/O"); - _tree -> Branch("byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017", &_byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017/O"); - _tree -> Branch("byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017", &_byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017/O"); - _tree -> Branch("byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017", &_byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017/O"); - _tree -> Branch("byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017", &_byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017, "byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017/O"); - - // newDM2017v2 training for Fall 17 - _tree -> Branch("byIsolationMVArun2017v2DBnewDMwLTraw2017", &_byIsolationMVArun2017v2DBnewDMwLTraw2017, "byIsolationMVArun2017v2DBnewDMwLTraw2017/F"); - _tree -> Branch("byVVLooseIsolationMVArun2017v2DBnewDMwLT2017", &_byVVLooseIsolationMVArun2017v2DBnewDMwLT2017, "byVVLooseIsolationMVArun2017v2DBnewDMwLT2017/O"); - _tree -> Branch("byVLooseIsolationMVArun2017v2DBnewDMwLT2017", &_byVLooseIsolationMVArun2017v2DBnewDMwLT2017, "byVLooseIsolationMVArun2017v2DBnewDMwLT2017/O"); - _tree -> 
Branch("byLooseIsolationMVArun2017v2DBnewDMwLT2017", &_byLooseIsolationMVArun2017v2DBnewDMwLT2017, "byLooseIsolationMVArun2017v2DBnewDMwLT2017/O"); - _tree -> Branch("byMediumIsolationMVArun2017v2DBnewDMwLT2017", &_byMediumIsolationMVArun2017v2DBnewDMwLT2017, "byMediumIsolationMVArun2017v2DBnewDMwLT2017/O"); - _tree -> Branch("byTightIsolationMVArun2017v2DBnewDMwLT2017", &_byTightIsolationMVArun2017v2DBnewDMwLT2017, "byTightIsolationMVArun2017v2DBnewDMwLT2017/O"); - _tree -> Branch("byVTightIsolationMVArun2017v2DBnewDMwLT2017", &_byVTightIsolationMVArun2017v2DBnewDMwLT2017, "byVTightIsolationMVArun2017v2DBnewDMwLT2017/O"); - _tree -> Branch("byVVTightIsolationMVArun2017v2DBnewDMwLT2017", &_byVVTightIsolationMVArun2017v2DBnewDMwLT2017, "byVVTightIsolationMVArun2017v2DBnewDMwLT2017/O"); - - _tree -> Branch("againstMuonLoose3", &_againstMuonLoose3, "againstMuonLoose3/O");; - _tree -> Branch("againstMuonTight3", &_againstMuonTight3, "againstMuonTight3/O"); - _tree -> Branch("againstElectronVLooseMVA6", &_againstElectronVLooseMVA6, "againstElectronVLooseMVA6/O"); - _tree -> Branch("againstElectronLooseMVA6", &_againstElectronLooseMVA6, "againstElectronLooseMVA6/O"); - _tree -> Branch("againstElectronMediumMVA6", &_againstElectronMediumMVA6, "againstElectronMediumMVA6/O"); - _tree -> Branch("againstElectronTightMVA6", &_againstElectronTightMVA6, "againstElectronTightMVA6/O"); - _tree -> Branch("againstElectronVTightMVA6", &_againstElectronVTightMVA6, "againstElectronVTightMVA6/O"); - - _tree -> Branch("muonPt", &_muonPt, "muonPt/F"); - _tree -> Branch("muonEta", &_muonEta, "muonEta/F"); - _tree -> Branch("muonPhi", &_muonPhi, "muonPhi/F"); - - _tree -> Branch("MET", &_MET, "MET/F"); - - _tree -> Branch("hltPt", &_hltPt); - _tree -> Branch("hltEta", &_hltEta); - _tree -> Branch("hltPhi", &_hltPhi); - _tree -> Branch("hltMass", &_hltMass); - - _tree -> Branch("hltL2CaloJetPt", &_hltL2CaloJetPt, "hltL2CaloJetPt/F"); - _tree -> Branch("hltL2CaloJetEta", &_hltL2CaloJetEta, "hltL2CaloJetEta/F"); - _tree -> Branch("hltL2CaloJetPhi", &_hltL2CaloJetPhi, "hltL2CaloJetPhi/F"); - _tree -> Branch("hltL2CaloJetIso", &_hltL2CaloJetIso, "hltL2CaloJetIso/F"); - _tree -> Branch("hltL2CaloJetIsoPixPt", &_hltL2CaloJetIsoPixPt, "hltL2CaloJetIsoPixPt/F"); - _tree -> Branch("hltL2CaloJetIsoPixEta", &_hltL2CaloJetIsoPixEta, "hltL2CaloJetIsoPixEta/F"); - _tree -> Branch("hltL2CaloJetIsoPixPhi", &_hltL2CaloJetIsoPixPhi, "hltL2CaloJetIsoPixPhi/F"); - - _tree -> Branch("hltPFTauTrackPt", &_hltPFTauTrackPt, "hltPFTauTrackPt/F"); - _tree -> Branch("hltPFTauTrackEta", &_hltPFTauTrackEta, "hltPFTauTrackEta/F");; - _tree -> Branch("hltPFTauTrackPhi", &_hltPFTauTrackPhi, "hltPFTauTrackPhi/F"); - _tree -> Branch("hltPFTauTrackRegPt", &_hltPFTauTrackRegPt, "hltPFTauTrackRegPt/F"); - _tree -> Branch("hltPFTauTrackRegEta", &_hltPFTauTrackRegEta, "hltPFTauTrackRegEta/F");; - _tree -> Branch("hltPFTauTrackRegPhi", &_hltPFTauTrackRegPhi, "hltPFTauTrackRegPhi/F"); - _tree -> Branch("hltPFTau35TrackPt1RegPt", &_hltPFTau35TrackPt1RegPt, "hltPFTau35TrackPt1RegPt/F"); - _tree -> Branch("hltPFTau35TrackPt1RegEta", &_hltPFTau35TrackPt1RegEta, "hltPFTau35TrackPt1RegEta/F");; - _tree -> Branch("hltPFTau35TrackPt1RegPhi", &_hltPFTau35TrackPt1RegPhi, "hltPFTau35TrackPt1RegPhi/F"); - - _tree -> Branch("hltHPSPFTauTrackPt", &_hltHPSPFTauTrackPt, "hltHPSPFTauTrackPt/F"); - _tree -> Branch("hltHPSPFTauTrackEta", &_hltHPSPFTauTrackEta, "hltHPSPFTauTrackEta/F");; - _tree -> Branch("hltHPSPFTauTrackPhi", &_hltHPSPFTauTrackPhi, "hltHPSPFTauTrackPhi/F"); 
- _tree -> Branch("hltHPSPFTauTrackRegPt", &_hltHPSPFTauTrackRegPt, "hltHPSPFTauTrackRegPt/F"); - _tree -> Branch("hltHPSPFTauTrackRegEta", &_hltHPSPFTauTrackRegEta, "hltHPSPFTauTrackRegEta/F");; - _tree -> Branch("hltHPSPFTauTrackRegPhi", &_hltHPSPFTauTrackRegPhi, "hltHPSPFTauTrackRegPhi/F"); - - _tree -> Branch("l1tPt", &_l1tPt, "l1tPt/F"); - _tree -> Branch("l1tEta", &_l1tEta, "l1tEta/F"); - _tree -> Branch("l1tPhi", &_l1tPhi, "l1tPhi/F"); - _tree -> Branch("l1tQual", &_l1tQual, "l1tQual/I"); - _tree -> Branch("l1tIso", &_l1tIso, "l1tIso/I"); - _tree -> Branch("l1tEmuPt", &_l1tEmuPt, "l1tEmuPt/F"); - _tree -> Branch("l1tEmuEta", &_l1tEmuEta, "l1tEmuEta/F"); - _tree -> Branch("l1tEmuPhi", &_l1tEmuPhi, "l1tEmuPhi/F"); - _tree -> Branch("l1tEmuQual", &_l1tEmuQual, "l1tEmuQual/I"); - _tree -> Branch("l1tEmuIso", &_l1tEmuIso, "l1tEmuIso/I"); - _tree -> Branch("l1tEmuNTT", &_l1tEmuNTT, "l1tEmuNTT/I"); - _tree -> Branch("l1tEmuHasEM", &_l1tEmuHasEM, "l1tEmuHasEM/I"); - _tree -> Branch("l1tEmuIsMerged", &_l1tEmuIsMerged, "l1tEmuIsMerged/I"); - _tree -> Branch("l1tEmuTowerIEta", &_l1tEmuTowerIEta, "l1tEmuTowerIEta/I"); - _tree -> Branch("l1tEmuTowerIPhi", &_l1tEmuTowerIPhi, "l1tEmuTowerIPhi/I"); - _tree -> Branch("l1tEmuRawEt", &_l1tEmuRawEt, "l1tEmuRawEt/I"); - _tree -> Branch("l1tEmuIsoEt", &_l1tEmuIsoEt, "l1tEmuIsoEt/I"); - - - _tree -> Branch("hasTriggerMuonType", &_hasTriggerMuonType, "hasTriggerMuonType/O"); - _tree -> Branch("hasTriggerTauType", &_hasTriggerTauType, "hasTriggerTauType/O"); - _tree -> Branch("isMatched", &_isMatched, "isMatched/O"); - _tree -> Branch("isOS", &_isOS, "isOS/O"); - _tree -> Branch("foundJet", &_foundJet, "foundJet/I"); - _tree -> Branch("Nvtx", &_Nvtx, "Nvtx/I"); - _tree -> Branch("nTruePU", &_nTruePU, "nTruePU/F"); - - return; -} - - -void Ntuplizer::endJob() -{ - return; -} - - -void Ntuplizer::endRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - return; -} - - -void Ntuplizer::analyze(const edm::Event& iEvent, const edm::EventSetup& eSetup) -{ - this -> Initialize(); - - _indexevents = iEvent.id().event(); - _runNumber = iEvent.id().run(); - _lumi = iEvent.luminosityBlock(); - if(!_isMC) - _PS_column = _hltPrescale->prescaleSet(iEvent,eSetup); - - edm::Handle genEvt; - try {iEvent.getByToken(_genTag, genEvt);} catch (...) {;} - if(genEvt.isValid()) _MC_weight = genEvt->weight(); - - // search for the tag in the event - edm::Handle > genPartHandle; - edm::Handle muonHandle; - edm::Handle tauHandle; - edm::Handle metHandle; - edm::Handle triggerObjects; - edm::Handle triggerBits; - edm::Handle > vertexes; - edm::Handle> puInfo; - - edm::Handle< reco::CaloJetCollection > L2CaloJets_ForIsoPix_Handle; - edm::Handle< reco::JetTagCollection > L2CaloJets_ForIsoPix_IsoHandle; - - - if(_isMC) - iEvent.getByToken(_genPartTag, genPartHandle); - iEvent.getByToken(_muonsTag, muonHandle); - iEvent.getByToken(_tauTag, tauHandle); - iEvent.getByToken (_metTag, metHandle); - iEvent.getByToken(_triggerObjects, triggerObjects); - iEvent.getByToken(_triggerBits, triggerBits); - iEvent.getByToken(_VtxTag,vertexes); - iEvent.getByToken(_puTag, puInfo); - - try {iEvent.getByToken(_hltL2CaloJet_ForIsoPix_Tag, L2CaloJets_ForIsoPix_Handle);} catch (...) {;} - try {iEvent.getByToken(_hltL2CaloJet_ForIsoPix_IsoTag, L2CaloJets_ForIsoPix_IsoHandle);} catch (...) {;} - - - - //! 
TagAndProbe on HLT taus - const edm::TriggerNames &names = iEvent.triggerNames(*triggerBits); - const pat::TauRef tau = (*tauHandle)[0] ; - const pat::MuonRef muon = (*muonHandle)[0] ; - const pat::MET& met = (*metHandle)[0]; - - _MET = met.pt(); - _mT = this->ComputeMT (muon->p4(), met); - - if(muonHandle.isValid()) _isOS = (muon -> charge() / tau -> charge() < 0) ? true : false; - - - _tauTriggerBitSet.reset(); - - bool foundMuTrigger = false; - - for (pat::TriggerObjectStandAlone obj : *triggerObjects) - { - - obj.unpackPathNames(names); - const edm::TriggerNames::Strings& triggerNames = names.triggerNames(); - - if(obj.hasTriggerObjectType(trigger::TriggerMuon)){ - - const float dR = deltaR (*muon, obj); - if ( dR < 0.5 && fabs(obj.eta())<2.1 ){ - - for (const tParameterSet& parameter : _parameters_Tag) - { - if ((parameter.hltPathIndex >= 0)&&(obj.hasPathName(triggerNames[parameter.hltPathIndex], true, false))) - foundMuTrigger = true; - } - - } - - } - - - - const float dR = deltaR (*tau, obj); - if ( dR < 0.5) - { - _isMatched = true; - _hasTriggerTauType = obj.hasTriggerObjectType(trigger::TriggerTau); - _hasTriggerMuonType = obj.hasTriggerObjectType(trigger::TriggerMuon); - - //Looking for the path - unsigned int x = 0; - bool foundTrigger = false; - for (const tParameterSet& parameter : _parameters) - { - if ((parameter.hltPathIndex >= 0)&&(obj.hasPathName(triggerNames[parameter.hltPathIndex], true, false))) - - { - - foundTrigger = true; - //Path found, now looking for the label 1, if present in the parameter set - //Retrieving filter list for the event - - const std::vector& filters = (parameter.leg1 == 15)? (parameter.hltFilters1):(parameter.hltFilters2); - if (this -> hasFilters(obj, filters)) - { - _hltPt[x] = obj.pt(); - _hltEta[x] = obj.eta(); - _hltPhi[x] = obj.phi(); - _hltMass[x] = obj.p4().mass(); - _tauTriggerBitSet[x] = true; - } - } - x++; - } - if (foundTrigger) _foundJet++; - - const std::vector& L2CaloJetIsoPix_filters = {"hltL2TauIsoFilter"}; - if (this -> hasFilters(obj, L2CaloJetIsoPix_filters) && obj.pt()>_hltL2CaloJetIsoPixPt){ - _hltL2CaloJetIsoPixPt = obj.pt(); - _hltL2CaloJetIsoPixEta = obj.eta(); - _hltL2CaloJetIsoPixPhi = obj.phi(); - } - - const std::vector& PFTauTrack_filters = {"hltPFTauTrack"}; - if (this -> hasFilters(obj, PFTauTrack_filters) && obj.pt()>_hltPFTauTrackPt){ - _hltPFTauTrackPt = obj.pt(); - _hltPFTauTrackEta = obj.eta(); - _hltPFTauTrackPhi = obj.phi(); - } - - const std::vector& PFTauTrackReg_filters = {"hltPFTauTrackReg"}; - if (this -> hasFilters(obj, PFTauTrackReg_filters) && obj.pt()>_hltPFTauTrackRegPt){ - _hltPFTauTrackRegPt = obj.pt(); - _hltPFTauTrackRegEta = obj.eta(); - _hltPFTauTrackRegPhi = obj.phi(); - } - - const std::vector& PFTau35TrackPt1Reg_filters = {"hltSinglePFTau35TrackPt1Reg"}; - if (this -> hasFilters(obj, PFTau35TrackPt1Reg_filters) && obj.pt()>_hltPFTau35TrackPt1RegPt){ - _hltPFTau35TrackPt1RegPt = obj.pt(); - _hltPFTau35TrackPt1RegEta = obj.eta(); - _hltPFTau35TrackPt1RegPhi = obj.phi(); - } - - const std::vector& HPSPFTauTrack_filters = {"hltHpsPFTauTrack"}; - if (this -> hasFilters(obj, HPSPFTauTrack_filters) && obj.pt()>_hltHPSPFTauTrackPt){ - _hltHPSPFTauTrackPt = obj.pt(); - _hltHPSPFTauTrackEta = obj.eta(); - _hltHPSPFTauTrackPhi = obj.phi(); - } - - const std::vector& HPSPFTauTrackReg_filters = {"hltHpsPFTauTrackReg"}; - if (this -> hasFilters(obj, HPSPFTauTrackReg_filters) && obj.pt()>_hltHPSPFTauTrackRegPt){ - _hltHPSPFTauTrackRegPt = obj.pt(); - _hltHPSPFTauTrackRegEta = obj.eta(); - 
_hltHPSPFTauTrackRegPhi = obj.phi(); - } - - } - } - - - if(L2CaloJets_ForIsoPix_Handle.isValid() && L2CaloJets_ForIsoPix_IsoHandle.isValid()){ - - for (auto const & jet : *L2CaloJets_ForIsoPix_IsoHandle){ - edm::Ref jetRef = edm::Ref(L2CaloJets_ForIsoPix_Handle,jet.first.key()); - - const float dR = deltaR (*tau, *(jet.first)); - - if ( dR < 0.5 && jet.first->pt()>_hltL2CaloJetPt) - { - _hltL2CaloJetPt = jet.first->pt(); - _hltL2CaloJetEta = jet.first->eta(); - _hltL2CaloJetPhi = jet.first->phi(); - _hltL2CaloJetIso = jet.second; - } - - } - - } - - - - //! TagAndProbe on L1T taus - - edm::Handle< BXVector > L1TauHandle; - iEvent.getByToken(_L1TauTag, L1TauHandle); - - float minDR = 0.5; //Uncomment for new match algo - - for (l1t::TauBxCollection::const_iterator bx0TauIt = L1TauHandle->begin(0); bx0TauIt != L1TauHandle->end(0) ; bx0TauIt++) - { - const float dR = deltaR(*tau, *bx0TauIt); - const l1t::Tau& l1tTau = *bx0TauIt; - - //cout<<"FW Tau, pT = "<tauID("decayModeFindingNewDMs"); - - _byLooseCombinedIsolationDeltaBetaCorr3Hits = tau->tauID("byLooseCombinedIsolationDeltaBetaCorr3Hits"); - _byMediumCombinedIsolationDeltaBetaCorr3Hits = tau->tauID("byMediumCombinedIsolationDeltaBetaCorr3Hits"); - _byTightCombinedIsolationDeltaBetaCorr3Hits = tau->tauID("byTightCombinedIsolationDeltaBetaCorr3Hits"); - _byVLooseIsolationMVArun2v1DBoldDMwLT = tau->tauID("byVLooseIsolationMVArun2v1DBoldDMwLT"); - _byLooseIsolationMVArun2v1DBoldDMwLT = tau->tauID("byLooseIsolationMVArun2v1DBoldDMwLT"); - _byMediumIsolationMVArun2v1DBoldDMwLT = tau->tauID("byMediumIsolationMVArun2v1DBoldDMwLT"); - _byTightIsolationMVArun2v1DBoldDMwLT = tau->tauID("byTightIsolationMVArun2v1DBoldDMwLT"); - _byVTightIsolationMVArun2v1DBoldDMwLT = tau->tauID("byVTightIsolationMVArun2v1DBoldDMwLT"); - _byVLooseIsolationMVArun2v1DBnewDMwLT = tau->tauID("byVLooseIsolationMVArun2v1DBnewDMwLT"); - _byLooseIsolationMVArun2v1DBnewDMwLT = tau->tauID("byLooseIsolationMVArun2v1DBnewDMwLT"); - _byMediumIsolationMVArun2v1DBnewDMwLT = tau->tauID("byMediumIsolationMVArun2v1DBnewDMwLT"); - _byTightIsolationMVArun2v1DBnewDMwLT = tau->tauID("byTightIsolationMVArun2v1DBnewDMwLT"); - _byVTightIsolationMVArun2v1DBnewDMwLT = tau->tauID("byVTightIsolationMVArun2v1DBnewDMwLT"); - _byLooseIsolationMVArun2v1DBdR03oldDMwLT = tau->tauID("byLooseIsolationMVArun2v1DBdR03oldDMwLT"); - _byMediumIsolationMVArun2v1DBdR03oldDMwLT = tau->tauID("byMediumIsolationMVArun2v1DBdR03oldDMwLT"); - _byTightIsolationMVArun2v1DBdR03oldDMwLT = tau->tauID("byTightIsolationMVArun2v1DBdR03oldDMwLT"); - _byVTightIsolationMVArun2v1DBdR03oldDMwLT = tau->tauID("byVTightIsolationMVArun2v1DBdR03oldDMwLT"); - - // 2017v1 training for Fall 17 - _byIsolationMVArun2017v1DBoldDMwLTraw2017 = tau->tauID("byIsolationMVArun2017v1DBoldDMwLTraw2017"); - _byVVLooseIsolationMVArun2017v1DBoldDMwLT2017 = tau->tauID("byVVLooseIsolationMVArun2017v1DBoldDMwLT2017"); - _byVLooseIsolationMVArun2017v1DBoldDMwLT2017 = tau->tauID("byVLooseIsolationMVArun2017v1DBoldDMwLT2017"); - _byLooseIsolationMVArun2017v1DBoldDMwLT2017 = tau->tauID("byLooseIsolationMVArun2017v1DBoldDMwLT2017"); - _byMediumIsolationMVArun2017v1DBoldDMwLT2017 = tau->tauID("byMediumIsolationMVArun2017v1DBoldDMwLT2017"); - _byTightIsolationMVArun2017v1DBoldDMwLT2017 = tau->tauID("byTightIsolationMVArun2017v1DBoldDMwLT2017"); - _byVTightIsolationMVArun2017v1DBoldDMwLT2017 = tau->tauID("byVTightIsolationMVArun2017v1DBoldDMwLT2017"); - _byVVTightIsolationMVArun2017v1DBoldDMwLT2017 = tau->tauID("byVVTightIsolationMVArun2017v1DBoldDMwLT2017"); - 
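Each discriminator below is read with pat::Tau::tauID(name), which returns a float that the code then stores as a bool (non-zero meaning the working point is passed); asking for a name that is not stored in the MiniAOD raises an exception. A hedged sketch of a guarded accessor one could use instead (safeTauID is illustrative; isTauIDAvailable is part of the PAT tau interface):

#include <string>
#include "DataFormats/PatCandidates/interface/Tau.h"

// Illustrative only: return the discriminator value, or a fallback when the ID is not stored.
float safeTauID(const pat::Tau& tau, const std::string& name, float fallback = -999.f)
{
    return tau.isTauIDAvailable(name) ? tau.tauID(name) : fallback;
}

// e.g. bool passLoose = safeTauID(*tau, "byLooseIsolationMVArun2v1DBoldDMwLT") > 0.5f;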
- // 2017v2 training for Fall 17 - _byIsolationMVArun2017v2DBoldDMwLTraw2017 = tau->tauID("byIsolationMVArun2017v2DBoldDMwLTraw2017"); - _byVVLooseIsolationMVArun2017v2DBoldDMwLT2017 = tau->tauID("byVVLooseIsolationMVArun2017v2DBoldDMwLT2017"); - _byVLooseIsolationMVArun2017v2DBoldDMwLT2017 = tau->tauID("byVLooseIsolationMVArun2017v2DBoldDMwLT2017"); - _byLooseIsolationMVArun2017v2DBoldDMwLT2017 = tau->tauID("byLooseIsolationMVArun2017v2DBoldDMwLT2017"); - _byMediumIsolationMVArun2017v2DBoldDMwLT2017 = tau->tauID("byMediumIsolationMVArun2017v2DBoldDMwLT2017"); - _byTightIsolationMVArun2017v2DBoldDMwLT2017 = tau->tauID("byTightIsolationMVArun2017v2DBoldDMwLT2017"); - _byVTightIsolationMVArun2017v2DBoldDMwLT2017 = tau->tauID("byVTightIsolationMVArun2017v2DBoldDMwLT2017"); - _byVVTightIsolationMVArun2017v2DBoldDMwLT2017 = tau->tauID("byVVTightIsolationMVArun2017v2DBoldDMwLT2017"); - - // dm0p32017v2 training for Fall 17 - _byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017 = tau->tauID("byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017"); - _byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = tau->tauID("byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017"); - _byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = tau->tauID("byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017"); - _byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = tau->tauID("byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017"); - _byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = tau->tauID("byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017"); - _byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = tau->tauID("byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017"); - _byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = tau->tauID("byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017"); - _byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = tau->tauID("byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017"); - - // new2017v2 training for Fall 17 - _byIsolationMVArun2017v2DBnewDMwLTraw2017 = tau->tauID("byIsolationMVArun2017v2DBnewDMwLTraw2017"); - _byVVLooseIsolationMVArun2017v2DBnewDMwLT2017 = tau->tauID("byVVLooseIsolationMVArun2017v2DBnewDMwLT2017"); - _byVLooseIsolationMVArun2017v2DBnewDMwLT2017 = tau->tauID("byVLooseIsolationMVArun2017v2DBnewDMwLT2017"); - _byLooseIsolationMVArun2017v2DBnewDMwLT2017 = tau->tauID("byLooseIsolationMVArun2017v2DBnewDMwLT2017"); - _byMediumIsolationMVArun2017v2DBnewDMwLT2017 = tau->tauID("byMediumIsolationMVArun2017v2DBnewDMwLT2017"); - _byTightIsolationMVArun2017v2DBnewDMwLT2017 = tau->tauID("byTightIsolationMVArun2017v2DBnewDMwLT2017"); - _byVTightIsolationMVArun2017v2DBnewDMwLT2017 = tau->tauID("byVTightIsolationMVArun2017v2DBnewDMwLT2017"); - _byVVTightIsolationMVArun2017v2DBnewDMwLT2017 = tau->tauID("byVVTightIsolationMVArun2017v2DBnewDMwLT2017"); - - _againstMuonLoose3 = tau->tauID("againstMuonLoose3"); - _againstMuonTight3 = tau->tauID("againstMuonTight3"); - _againstElectronVLooseMVA6 = tau->tauID("againstElectronVLooseMVA6"); - _againstElectronLooseMVA6 = tau->tauID("againstElectronLooseMVA6"); - _againstElectronMediumMVA6 = tau->tauID("againstElectronMediumMVA6"); - _againstElectronTightMVA6 = tau->tauID("againstElectronTightMVA6"); - _againstElectronVTightMVA6 = tau->tauID("againstElectronVTightMVA6"); - - if(muonHandle.isValid()){ - _muonPt=muon->pt(); - _muonEta=muon->eta(); - _muonPhi=muon->phi(); - _mVis = (muon->p4() + tau->p4()).mass(); - } - - _Nvtx = vertexes->size(); - - _nTruePU = -99; - - if (_isMC) { - - std::vector::const_iterator PVI; - for(PVI = puInfo->begin(); PVI != puInfo->end(); ++PVI) { - 
if(PVI->getBunchCrossing() == 0) { - float nTrueInt = PVI->getTrueNumInteractions(); - _nTruePU = nTrueInt; - break; - } - } - - } - - - _tauTriggerBits = _tauTriggerBitSet.to_ulong(); - - //Gen-matching - - if(_isMC){ - - const edm::View* genparts = genPartHandle.product(); - _tau_genindex = this->GenIndex(tau,genparts); - - } - - - - //std::cout << "++++++++++ FILL ++++++++++" << std::endl; - - if(foundMuTrigger) - _tree -> Fill(); - -} - -bool Ntuplizer::hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor) { - - const std::vector& eventLabels = obj.filterLabels(); - for (const std::string& filter : filtersToLookFor) - { - //Looking for matching filters - bool found = false; - for (const std::string& label : eventLabels) - { - - if (label == filter) - { - //std::cout << "#### FOUND FILTER " << label << " == " << filter << " ####" << std::endl; - found = true; - } - } - if(!found) return false; - } - - return true; -} - - - -int Ntuplizer::GenIndex(const pat::TauRef& tau, const edm::View* genparts){ - - float dRmin = 1.0; - int genMatchInd = -1; - - for(edm::View::const_iterator genpart = genparts->begin(); genpart!=genparts->end();++genpart){ - - int flags = genpart->userInt ("generalGenFlags"); - int apdg = abs(genpart->pdgId()); - float pT = genpart->p4().pt(); - - if( !( apdg==11 || apdg==13 || apdg==66615) ) continue; - - if( apdg==11 || apdg==13){ - if( !(pT>8 && (flags&1 || (flags>>5)&1)) ) continue; - } - else if(apdg==66615){ - int tauMothInd = genpart->userInt("TauMothIndex"); - pat::GenericParticle mother = (*genparts)[tauMothInd]; - int flags_tau = mother.userInt ("generalGenFlags"); - if( !(pT>15 && flags_tau&1) ) continue; - } - - float dR = deltaR(*tau,*genpart); - if(dR<0.2 && dR>5)&1) genMatchInd = 3; - } - else if(apdg==13){ - if(flags&1) genMatchInd = 2; - else if((flags>>5)&1) genMatchInd = 4; - } - else if(apdg==66615) - genMatchInd = 5; - } - - } - - return genMatchInd; - - -} - - - - -float Ntuplizer::ComputeMT (math::XYZTLorentzVector visP4, const pat::MET& met) -{ - math::XYZTLorentzVector METP4 (met.pt()*TMath::Cos(met.phi()), met.pt()*TMath::Sin(met.phi()), 0, met.pt()); - float scalSum = met.pt() + visP4.pt(); - - math::XYZTLorentzVector vecSum (visP4); - vecSum += METP4; - float vecSumPt = vecSum.pt(); - return sqrt (scalSum*scalSum - vecSumPt*vecSumPt); -} - - - - -#include -DEFINE_FWK_MODULE(Ntuplizer); - -#endif //NTUPLIZER_H diff --git a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe.cc b/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe.cc deleted file mode 100644 index 797d73c39e4..00000000000 --- a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe.cc +++ /dev/null @@ -1,817 +0,0 @@ -#ifndef NTUPLIZER_NOTAGANDPROBE_H -#define NTUPLIZER_NOTAGANDPROBE_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - - -#include "FWCore/Framework/interface/EDAnalyzer.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include -#include "DataFormats/PatCandidates/interface/Jet.h" - -#include "DataFormats/L1Trigger/interface/Jet.h" - -#include "FWCore/ServiceRegistry/interface/Service.h" -#include "FWCore/Common/interface/TriggerNames.h" -#include "HLTrigger/HLTcore/interface/HLTConfigProvider.h" -#include "DataFormats/L1Trigger/interface/Tau.h" -#include "DataFormats/VertexReco/interface/Vertex.h" - -#include "FWCore/Framework/interface/EventSetup.h" -#include "JetMETCorrections/Objects/interface/JetCorrector.h" -#include 
"CondFormats/JetMETObjects/interface/JetCorrectorParameters.h" -#include "CondFormats/JetMETObjects/interface/JetCorrectionUncertainty.h" -#include "JetMETCorrections/Objects/interface/JetCorrectionsRecord.h" - -#include "DataFormats/Common/interface/TriggerResults.h" - -#include "SimDataFormats/GeneratorProducts/interface/GenEventInfoProduct.h" - - -#include "tParameterSet.h" - -#include "CommonTools/UtilAlgos/interface/TFileService.h" -#include - - -//Set this variable to decide the number of triggers that you want to check simultaneously -#define NUMBER_OF_MAXIMUM_TRIGGERS 64 - - -/* - ██████ ███████ ██████ ██ █████ ██████ █████ ████████ ██ ██████ ███ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ █████ ██ ██ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██████ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -class Ntuplizer_noTagAndProbe : public edm::EDAnalyzer { -public: - /// Constructor - explicit Ntuplizer_noTagAndProbe(const edm::ParameterSet&); - /// Destructor - virtual ~Ntuplizer_noTagAndProbe(); - -private: - //----edm control--- - virtual void beginJob() ; - virtual void beginRun(edm::Run const&, edm::EventSetup const&); - virtual void analyze(const edm::Event&, const edm::EventSetup&); - virtual void endJob(); - virtual void endRun(edm::Run const&, edm::EventSetup const&); - void Initialize(); - bool hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor); - int FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc); - - TTree *_tree; - TTree *_triggerNamesTree; - std::string _treeName; - // ------------------------------------- - // variables to be filled in output tree - ULong64_t _indexevents; - Int_t _runNumber; - Int_t _lumi; - float _MC_weight; - unsigned long _tauTriggerBits; - float _tauPt; - float _tauEta; - float _tauPhi; - int _tauCharge; - int _tauDecayMode; - float _hltPt; - float _hltEta; - float _hltPhi; - int _l1tQual; - float _l1tPt; - float _l1tEta; - float _l1tPhi; - int _l1tIso; - int _l1tEmuQual; - float _l1tEmuPt; - float _l1tEmuEta; - float _l1tEmuPhi; - int _l1tEmuIso; - int _l1tEmuNTT; - int _l1tEmuHasEM; - int _l1tEmuIsMerged; - int _l1tEmuTowerIEta; - int _l1tEmuTowerIPhi; - int _l1tEmuRawEt; - int _l1tEmuIsoEt; - std::vector _l1tQualJet; - std::vector _l1tPtJet; - std::vector _l1tEtaJet; - std::vector _l1tPhiJet; - std::vector _l1tIsoJet; - std::vector _l1tTowerIEtaJet; - std::vector _l1tTowerIPhiJet; - std::vector _l1tRawEtJet; - - Bool_t _hasTriggerMuonType; - Bool_t _hasTriggerTauType; - Bool_t _isMatched; - Bool_t _isOS; - int _foundJet; - int _Nvtx; - - - //Jets variables - Int_t _numberOfJets; - std::vector _jets_px; - std::vector _jets_py; - std::vector _jets_pz; - std::vector _jets_e; - std::vector _jets_rawPt; - std::vector _jets_area; - std::vector _jets_mT; - std::vector _jets_PUJetID; - std::vector _jets_PUJetIDupdated; - std::vector _jets_vtxPt; - std::vector _jets_vtxMass; - std::vector _jets_vtx3dL; - std::vector _jets_vtxNtrk; - std::vector _jets_vtx3deL; - std::vector _jets_leadTrackPt; - std::vector _jets_leptonPtRel; - std::vector _jets_leptonPt; - std::vector _jets_leptonDeltaR; - std::vector _jets_chEmEF; - std::vector _jets_chHEF; - std::vector _jets_nEmEF; - std::vector _jets_nHEF; - std::vector _jets_MUF; - std::vector _jets_neMult; - std::vector _jets_chMult; - std::vector _jets_jecUnc; - - std::vector _jets_QGdiscr; - - std::vector _jets_Flavour; // parton 
flavour - std::vector _jets_HadronFlavour; // hadron flavour - std::vector _jets_genjetIndex; - std::vector _bdiscr; - std::vector _bdiscr2; - std::vector _bdiscr3; - std::vector _jetID; //1=loose, 2=tight, 3=tightlepveto - std::vector _jetrawf; - - edm::EDGetTokenT _genTag; - edm::EDGetTokenT _tauTag; - edm::EDGetTokenT _triggerObjects; - edm::EDGetTokenT _triggerBits; - edm::EDGetTokenT _L1TauTag ; - edm::EDGetTokenT _L1EmuTauTag ; - edm::EDGetTokenT> _JetTag; - edm::EDGetTokenT> _l1tJetTag; - // edm::EDGetTokenT> _l1tEmuJetTag; - edm::EDGetTokenT> _VtxTag; - - //!Contains the parameters - tVParameterSet _parameters; - - edm::InputTag _processName; - //! Maximum - std::bitset _tauTriggerBitSet; - - - - HLTConfigProvider _hltConfig; - - -}; - -/* - ██ ███ ███ ██████ ██ ███████ ███ ███ ███████ ███ ██ ████████ █████ ████████ ██ ██████ ███ ██ - ██ ████ ████ ██ ██ ██ ██ ████ ████ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ ████ ██ ██████ ██ █████ ██ ████ ██ █████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ███████ ███████ ██ ██ ███████ ██ ████ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -// ----Constructor and Destructor ----- -Ntuplizer_noTagAndProbe::Ntuplizer_noTagAndProbe(const edm::ParameterSet& iConfig) : - _genTag (consumes (iConfig.getParameter("genCollection"))), - _tauTag (consumes (iConfig.getParameter("taus"))), - _triggerObjects (consumes (iConfig.getParameter("triggerSet"))), - _triggerBits (consumes (iConfig.getParameter("triggerResultsLabel"))), - _L1TauTag (consumes (iConfig.getParameter("L1Tau"))), - _L1EmuTauTag (consumes (iConfig.getParameter("L1EmuTau"))), -_JetTag (consumes> (iConfig.getParameter("jetCollection"))), -_l1tJetTag (consumes> (iConfig.getParameter("l1tJetCollection"))), - _VtxTag (consumes> (iConfig.getParameter("Vertexes"))) -{ - this -> _treeName = iConfig.getParameter("treeName"); - this -> _processName = iConfig.getParameter("triggerResultsLabel"); - - TString triggerName; - edm::Service fs; - this -> _triggerNamesTree = fs -> make("triggerNames", "triggerNames"); - this -> _triggerNamesTree -> Branch("triggerNames",&triggerName); - - //Building the trigger arrays - const std::vector& HLTList = iConfig.getParameter > ("triggerList"); - for (const edm::ParameterSet& parameterSet : HLTList) { - tParameterSet pSet; - pSet.hltPath = parameterSet.getParameter("HLT"); - triggerName = pSet.hltPath; - pSet.hltFilters1 = parameterSet.getParameter >("path1"); - pSet.hltFilters2 = parameterSet.getParameter >("path2"); - pSet.leg1 = parameterSet.getParameter("leg1"); - pSet.leg2 = parameterSet.getParameter("leg2"); - this -> _parameters.push_back(pSet); - - this -> _triggerNamesTree -> Fill(); - } - - - this -> Initialize(); - return; -} - -Ntuplizer_noTagAndProbe::~Ntuplizer_noTagAndProbe() -{} - -void Ntuplizer_noTagAndProbe::beginRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - Bool_t changedConfig = false; - - if(!this -> _hltConfig.init(iRun, iSetup, this -> _processName.process(), changedConfig)){ - edm::LogError("HLTMatchingFilter") << "Initialization of HLTConfigProvider failed!!"; - return; - } - - const edm::TriggerNames::Strings& triggerNames = this -> _hltConfig.triggerNames(); - //std::cout << " ===== LOOKING FOR THE PATH INDEXES =====" << std::endl; - for (tParameterSet& parameter : this -> _parameters){ - const std::string& hltPath = parameter.hltPath; - bool found = false; - for(unsigned int j=0; j < triggerNames.size(); j++) - { - //std::cout << 
triggerNames[j] << std::endl; - if (triggerNames[j].find(hltPath) != std::string::npos) { - found = true; - parameter.hltPathIndex = j; - - //std::cout << "### FOUND AT INDEX #" << j << " --> " << triggerNames[j] << std::endl; - } - } - if (!found) parameter.hltPathIndex = -1; - } - -} - -void Ntuplizer_noTagAndProbe::Initialize() { - this -> _indexevents = 0; - this -> _runNumber = 0; - this -> _lumi = 0; - this -> _MC_weight = 1; - this -> _tauPt = -1.; - this -> _tauEta = -1.; - this -> _tauPhi = -1.; - this -> _tauCharge = -1; - this -> _tauDecayMode = -1; - this -> _isMatched = false; - this -> _hltPt = -1; - this -> _hltEta = 666; - this -> _hltPhi = 666; - this -> _l1tPt = -1; - this -> _l1tEta = 666; - this -> _l1tPhi = 666; - this -> _l1tQual = -1; - this -> _l1tIso = -1; - this -> _l1tEmuPt = -1; - this -> _l1tEmuEta = 666; - this -> _l1tEmuPhi = 666; - this -> _l1tEmuQual = -1; - this -> _l1tEmuIso = -1; - this -> _l1tEmuNTT = -1; - this -> _l1tEmuHasEM = -1; - this -> _l1tEmuIsMerged = -1; - this -> _l1tEmuTowerIEta = -1; - this -> _l1tEmuTowerIPhi = -1; - this -> _l1tEmuRawEt = -1; - this -> _l1tEmuIsoEt = -1; - this -> _foundJet = 0; - - this -> _l1tPtJet . clear(); - this -> _l1tEtaJet . clear(); - this -> _l1tPhiJet . clear(); - this -> _l1tQualJet . clear(); - this -> _l1tIsoJet . clear(); - this -> _l1tTowerIEtaJet . clear(); - this -> _l1tTowerIPhiJet . clear(); - this -> _l1tRawEtJet . clear(); - - _jets_px.clear(); - _jets_py.clear(); - _jets_pz.clear(); - _jets_e.clear(); - _jets_rawPt.clear(); - _jets_area.clear(); - _jets_mT.clear(); - _jets_PUJetID.clear(); - _jets_PUJetIDupdated.clear(); - _jets_vtxPt.clear(); - _jets_vtxMass.clear(); - _jets_vtx3dL.clear(); - _jets_vtxNtrk.clear(); - _jets_vtx3deL.clear(); - _jets_leadTrackPt.clear(); - _jets_leptonPtRel.clear(); - _jets_leptonPt.clear(); - _jets_leptonDeltaR.clear(); - _jets_chEmEF.clear(); - _jets_chHEF.clear(); - _jets_nEmEF.clear(); - _jets_nHEF.clear(); - _jets_MUF.clear(); - _jets_neMult.clear(); - _jets_chMult.clear(); - _jets_Flavour.clear(); - _jets_HadronFlavour.clear(); - _jets_genjetIndex.clear(); - _jets_jecUnc.clear(); - _jets_QGdiscr.clear(); - _numberOfJets=0; - _bdiscr.clear(); - _bdiscr2.clear(); - _bdiscr3.clear(); - _jetID.clear(); - _jetrawf.clear(); - -} - - -void Ntuplizer_noTagAndProbe::beginJob() -{ - edm::Service fs; - this -> _tree = fs -> make(this -> _treeName.c_str(), this -> _treeName.c_str()); - - //Branches - this -> _tree -> Branch("EventNumber",&_indexevents,"EventNumber/l"); - this -> _tree -> Branch("RunNumber",&_runNumber,"RunNumber/I"); - this -> _tree -> Branch("lumi",&_lumi,"lumi/I"); - this -> _tree -> Branch("MC_weight",&_MC_weight,"MC_weight/F"); - this -> _tree -> Branch("tauTriggerBits", &_tauTriggerBits, "tauTriggerBits/l"); - this -> _tree -> Branch("tauPt", &_tauPt, "tauPt/F"); - this -> _tree -> Branch("tauEta", &_tauEta, "tauEta/F"); - this -> _tree -> Branch("tauPhi", &_tauPhi, "tauPhi/F"); - this -> _tree -> Branch("tauCharge", &_tauCharge, "tauCharge/I"); - this -> _tree -> Branch("tauDecayMode", &_tauDecayMode, "tauDecayMode/I"); - this -> _tree -> Branch("hltPt", &_hltPt, "hltPt/F"); - this -> _tree -> Branch("hltEta", &_hltEta, "hltEta/F"); - this -> _tree -> Branch("hltPhi", &_hltPhi, "hltPhi/F"); - this -> _tree -> Branch("l1tPt", &_l1tPt, "l1tPt/F"); - this -> _tree -> Branch("l1tEta", &_l1tEta, "l1tEta/F"); - this -> _tree -> Branch("l1tPhi", &_l1tPhi, "l1tPhi/F"); - this -> _tree -> Branch("l1tQual", &_l1tQual, "l1tQual/I"); - this -> _tree -> 
Branch("l1tIso", &_l1tIso, "l1tIso/I"); - this -> _tree -> Branch("l1tEmuPt", &_l1tEmuPt, "l1tEmuPt/F"); - this -> _tree -> Branch("l1tEmuEta", &_l1tEmuEta, "l1tEmuEta/F"); - this -> _tree -> Branch("l1tEmuPhi", &_l1tEmuPhi, "l1tEmuPhi/F"); - this -> _tree -> Branch("l1tEmuQual", &_l1tEmuQual, "l1tEmuQual/I"); - this -> _tree -> Branch("l1tEmuIso", &_l1tEmuIso, "l1tEmuIso/I"); - this -> _tree -> Branch("l1tEmuNTT", &_l1tEmuNTT, "l1tEmuNTT/I"); - this -> _tree -> Branch("l1tEmuHasEM", &_l1tEmuHasEM, "l1tEmuHasEM/I"); - this -> _tree -> Branch("l1tEmuIsMerged", &_l1tEmuIsMerged, "l1tEmuIsMerged/I"); - this -> _tree -> Branch("l1tEmuTowerIEta", &_l1tEmuTowerIEta, "l1tEmuTowerIEta/I"); - this -> _tree -> Branch("l1tEmuTowerIPhi", &_l1tEmuTowerIPhi, "l1tEmuTowerIPhi/I"); - this -> _tree -> Branch("l1tEmuRawEt", &_l1tEmuRawEt, "l1tEmuRawEt/I"); - this -> _tree -> Branch("l1tEmuIsoEt", &_l1tEmuIsoEt, "l1tEmuIsoEt/I"); - - this -> _tree -> Branch("l1tPtJet", &_l1tPtJet); - this -> _tree -> Branch("l1tEtaJet", &_l1tEtaJet); - this -> _tree -> Branch("l1tPhiJet", &_l1tPhiJet); - this -> _tree -> Branch("l1tQualJet", &_l1tQualJet); - this -> _tree -> Branch("l1tIsoJet", &_l1tIsoJet); - this -> _tree -> Branch("l1tTowerIEtaJet", &_l1tTowerIEtaJet); - this -> _tree -> Branch("l1tTowerIPhiJet", &_l1tTowerIPhiJet); - this -> _tree -> Branch("l1tRawEtJet", &_l1tRawEtJet); - - this -> _tree -> Branch("hasTriggerMuonType", &_hasTriggerMuonType, "hasTriggerMuonType/O"); - this -> _tree -> Branch("hasTriggerTauType", &_hasTriggerTauType, "hasTriggerTauType/O"); - this -> _tree -> Branch("isMatched", &_isMatched, "isMatched/O"); - this -> _tree -> Branch("isOS", &_isOS, "isOS/O"); - this -> _tree -> Branch("foundJet", &_foundJet, "foundJet/I"); - this -> _tree -> Branch("Nvtx", &_Nvtx, "Nvtx/I"); - - this -> _tree->Branch("JetsNumber",&_numberOfJets,"JetsNumber/I"); - this -> _tree->Branch("jets_px",&_jets_px); - this -> _tree->Branch("jets_py",&_jets_py); - this -> _tree->Branch("jets_pz",&_jets_pz); - this -> _tree->Branch("jets_e",&_jets_e); - this -> _tree->Branch("jets_rawPt", &_jets_rawPt); - this -> _tree->Branch("jets_area", &_jets_area); - this -> _tree->Branch("jets_mT", &_jets_mT); - this -> _tree->Branch("jets_Flavour",&_jets_Flavour); - this -> _tree->Branch("jets_HadronFlavour",&_jets_HadronFlavour); - this -> _tree->Branch("jets_genjetIndex", &_jets_genjetIndex); - this -> _tree->Branch("jets_PUJetID",&_jets_PUJetID); - this -> _tree->Branch("jets_PUJetIDupdated",&_jets_PUJetIDupdated); - this -> _tree->Branch("jets_vtxPt", &_jets_vtxPt); - this -> _tree->Branch("jets_vtxMass", &_jets_vtxMass); - this -> _tree->Branch("jets_vtx3dL", &_jets_vtx3dL); - this -> _tree->Branch("jets_vtxNtrk", &_jets_vtxNtrk); - this -> _tree->Branch("jets_vtx3deL", &_jets_vtx3deL); - this -> _tree->Branch("jets_leadTrackPt", &_jets_leadTrackPt); - this -> _tree->Branch("jets_leptonPtRel", &_jets_leptonPtRel); - this -> _tree->Branch("jets_leptonPt", &_jets_leptonPt); - this -> _tree->Branch("jets_leptonDeltaR", &_jets_leptonDeltaR); - this -> _tree->Branch("jets_chEmEF" , &_jets_chEmEF); - this -> _tree->Branch("jets_chHEF" , &_jets_chHEF); - this -> _tree->Branch("jets_nEmEF" , &_jets_nEmEF); - this -> _tree->Branch("jets_nHEF" , &_jets_nHEF); - this -> _tree->Branch("jets_MUF" , &_jets_MUF); - this -> _tree->Branch("jets_neMult" , &_jets_neMult); - this -> _tree->Branch("jets_chMult" , &_jets_chMult); - this -> _tree->Branch("jets_jecUnc" , &_jets_jecUnc); - - return; -} - - -void Ntuplizer_noTagAndProbe::endJob() 
-{ - return; -} - - -void Ntuplizer_noTagAndProbe::endRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - return; -} - - -void Ntuplizer_noTagAndProbe::analyze(const edm::Event& iEvent, const edm::EventSetup& eSetup) -{ - this -> Initialize(); - - _indexevents = iEvent.id().event(); - _runNumber = iEvent.id().run(); - _lumi = iEvent.luminosityBlock(); - - edm::Handle genEvt; - try {iEvent.getByToken(_genTag, genEvt);} catch (...) {;} - if(genEvt.isValid()) this->_MC_weight = genEvt->weight(); - - //cout<<"EventNumber = "<<_indexevents< resultMuon(new pat::MuonRefVector); - - // search for the tag in the event - edm::Handle tauHandle; - edm::Handle triggerObjects; - edm::Handle triggerBits; - edm::Handle> jetHandle; - edm::Handle> l1tJetHandle; - edm::Handle > vertexes; - - iEvent.getByToken(this -> _tauTag, tauHandle); - iEvent.getByToken(this -> _triggerObjects, triggerObjects); - iEvent.getByToken(this -> _triggerBits, triggerBits); - iEvent.getByToken(this -> _JetTag, jetHandle); - iEvent.getByToken(this -> _l1tJetTag, l1tJetHandle); - iEvent.getByToken(this -> _VtxTag,vertexes); - - //! TagAndProbe on HLT taus - const edm::TriggerNames &names = iEvent.triggerNames(*triggerBits); - const pat::TauRef tau = (*tauHandle)[0] ; - - this -> _tauTriggerBitSet.reset(); - - - - for (pat::TriggerObjectStandAlone obj : *triggerObjects) - { - const float dR = deltaR (*tau, obj); - if ( dR < 0.5) - { - this -> _isMatched = true; - this -> _hasTriggerTauType = obj.hasTriggerObjectType(trigger::TriggerTau); - this -> _hasTriggerMuonType = obj.hasTriggerObjectType(trigger::TriggerMuon); - - obj.unpackPathNames(names); - const edm::TriggerNames::Strings& triggerNames = names.triggerNames(); - //Looking for the path - unsigned int x = 0; - bool foundTrigger = false; - for (const tParameterSet& parameter : this -> _parameters) - { - if ((parameter.hltPathIndex >= 0)&&(obj.hasPathName(triggerNames[parameter.hltPathIndex], true, false))) - { - foundTrigger = true; - //Path found, now looking for the label 1, if present in the parameter set - //std::cout << "==== FOUND PATH " << triggerNames[parameter.hltPathIndex] << " ====" << std::endl; - //Retrieving filter list for the event - const std::vector& filters = (parameter.leg1 == 15)? (parameter.hltFilters1):(parameter.hltFilters2); - if (this -> hasFilters(obj, filters)) - { - //std::cout << "#### FOUND TAU WITH HLT PATH " << x << " ####" << std::endl; - this -> _hltPt = obj.pt(); - this -> _hltEta = obj.eta(); - this -> _hltPhi = obj.phi(); - this -> _tauTriggerBitSet[x] = true; - //std::cout << this -> _tauTriggerBitSet.to_string() << std::endl; - } - } - x++; - } - if (foundTrigger) this -> _foundJet++; - } - } - - - //! TagAndProbe on L1T taus - - for(BXVector::const_iterator jet = l1tJetHandle -> begin(0); jet != l1tJetHandle -> end(0) ; jet++) - { - this -> _l1tPtJet . push_back(jet -> pt()); - this -> _l1tEtaJet . push_back(jet -> eta()); - this -> _l1tPhiJet . push_back(jet -> phi()); - this -> _l1tIsoJet . push_back(jet -> hwIso()); - //this -> _l1tNTTJet . push_back(jet -> nTT()); - this -> _l1tQualJet . push_back(jet -> hwQual()); - //this -> _l1tHasEMJet . push_back(jet -> hasEM()); - //this -> _l1tIsMergedJet . push_back(jet -> isMerged()); - this -> _l1tTowerIEtaJet . push_back(jet -> towerIEta()); - this -> _l1tTowerIPhiJet . push_back(jet -> towerIPhi()); - this -> _l1tRawEtJet . push_back(jet -> rawEt()); - //this -> _l1tIsoEtJet . 
push_back(jet -> isoEt()); - } - - edm::Handle< BXVector > L1TauHandle; - iEvent.getByToken(_L1TauTag, L1TauHandle); - - float minDR = 0.5; //Uncomment for new match algo - - //cout<<"ill try this: "<begin(0); bx0TauIt != L1TauHandle->end(0) ; bx0TauIt++) - { - const float dR = deltaR(*tau, *bx0TauIt); - const l1t::Tau& l1tTau = *bx0TauIt; - - //dump check - //cout<<"FW Tau, pT = "< _foundJet << " hltPt " << this -> _hltPt << endl; - - this -> _tauTriggerBits = this -> _tauTriggerBitSet.to_ulong(); - //std::cout << "++++++++++ FILL ++++++++++" << std::endl; - - const edm::View* jets = jetHandle.product(); - edm::ESHandle JetCorParColl; - eSetup.get().get("AK4PFchs",JetCorParColl); - JetCorrectorParameters const & JetCorPar = (*JetCorParColl)["Uncertainty"]; - JetCorrectionUncertainty jecUnc (JetCorPar); - _numberOfJets = FillJet(jets,iEvent, &jecUnc); - - - this -> _tree -> Fill(); - -} - -bool Ntuplizer_noTagAndProbe::hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor) { - - const std::vector& eventLabels = obj.filterLabels(); - for (const std::string& filter : filtersToLookFor) - { - //Looking for matching filters - bool found = false; - for (const std::string& label : eventLabels) - { - //if (label == std::string("hltOverlapFilterIsoMu17MediumIsoPFTau40Reg")) - if (label == filter) - { - - //std::cout << "#### FOUND FILTER " << label << " == " << filter << " ####" << std::endl; - found = true; - } - } - if(!found) return false; - } - - return true; -} - -int Ntuplizer_noTagAndProbe::FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc){ - int nJets=0; - vector > softLeptInJet; // pt, idx - for(edm::View::const_iterator ijet = jets->begin(); ijet!=jets->end();++ijet){ - nJets++; - _jets_px.push_back( (float) ijet->px()); - _jets_py.push_back( (float) ijet->py()); - _jets_pz.push_back( (float) ijet->pz()); - _jets_e.push_back( (float) ijet->energy()); - _jets_mT.push_back( (float) ijet->mt()); - _jets_Flavour.push_back(ijet->partonFlavour()); - _jets_HadronFlavour.push_back(ijet->hadronFlavour()); - _jets_PUJetID.push_back(ijet->userFloat("pileupJetId:fullDiscriminant")); - _jets_PUJetIDupdated.push_back(ijet->hasUserFloat("pileupJetIdUpdated:fullDiscriminant") ? ijet->userFloat("pileupJetIdUpdated:fullDiscriminant") : -999); - //float vtxPx = ijet->userFloat ("vtxPx"); - //float vtxPy = ijet->userFloat ("vtxPy"); - //_jets_vtxPt. push_back(TMath::Sqrt(vtxPx*vtxPx + vtxPy*vtxPy)); - //_jets_vtxMass.push_back(ijet->userFloat("vtxMass")); - //_jets_vtx3dL. 
push_back(ijet->userFloat("vtx3DVal")); - //_jets_vtxNtrk.push_back(ijet->userFloat("vtxNtracks")); - //_jets_vtx3deL.push_back(ijet->userFloat("vtx3DSig")); - - _bdiscr.push_back(ijet->bDiscriminator("pfJetProbabilityBJetTags")); - _bdiscr2.push_back(ijet->bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags")); - _bdiscr3.push_back(ijet->bDiscriminator("pfCombinedMVAV2BJetTags")); - - - //PF jet ID - float NHF = ijet->neutralHadronEnergyFraction(); - float NEMF = ijet->neutralEmEnergyFraction(); - float CHF = ijet->chargedHadronEnergyFraction(); - float MUF = ijet->muonEnergyFraction(); - float CEMF = ijet->chargedEmEnergyFraction(); - int NumNeutralParticles =ijet->neutralMultiplicity(); - int chargedMult = ijet->chargedMultiplicity(); - int NumConst = ijet->chargedMultiplicity()+NumNeutralParticles; - float CHM = ijet->chargedMultiplicity(); - float absjeta = fabs(ijet->eta()); - - _jets_chEmEF .push_back(CEMF); - _jets_chHEF .push_back(CHF); - _jets_nEmEF .push_back(NEMF); - _jets_nHEF .push_back(NHF); - _jets_chMult .push_back(chargedMult); - _jets_neMult .push_back(NumNeutralParticles); - _jets_MUF .push_back(MUF); - - int jetid=0; - bool looseJetID = false; - bool tightJetID = false; - bool tightLepVetoJetID = false; - - if (absjeta <= 2.7) - { - looseJetID = ( (NHF<0.99 && NEMF<0.99 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightLepVetoJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1 && MUF<0.8) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.90) || absjeta>2.4) ); - } - else if (absjeta <= 3.0) - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>2 ) ; - tightJetID = looseJetID; - } - else - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>10 ); - tightJetID = looseJetID; - } - if (looseJetID) ++jetid; - if (tightJetID) ++jetid; - if (tightLepVetoJetID) ++jetid; - - _jetID.push_back(jetid); - float jecFactor = ijet->jecFactor("Uncorrected") ; - float jetRawPt = jecFactor * ijet->pt(); - //float jetRawPt2 = ijet->pt() / jecFactor; // this is wrong - _jets_rawPt.push_back ( jetRawPt ); - _jets_area.push_back (ijet->jetArea()); - _jetrawf.push_back(jecFactor); - - // loop on jet contituents to retrieve info for b jet regression - int nDau = ijet -> numberOfDaughters(); - //cout << "JET: " << (ijet - jets->begin()) << " N daught: " << nDau << endl; - - // TLorentzVector vJet (0,0,0,0); - // vJet.SetPxPyPzE (ijet->px(), ijet->py(), ijet->pz(), ijet->energy()); - // TLorentzVector vDau (0,0,0,0); - // TLorentzVector vSum (0,0,0,0); - - float leadTrackPt = 0.; - softLeptInJet.clear(); - for (int iDau = 0; iDau < nDau; ++iDau) - { - // pdg id for packed pf candidates meaning is: - // the particle charge and pdgId: 11, 13, 22 for ele/mu/gamma, 211 for charged hadrons, 130 for neutral hadrons, 1 and 2 for hadronic and em particles in HF. 
- const reco::Candidate * dau = ijet->daughter(iDau); - if (abs(dau->pdgId()) == 11 || abs(dau->pdgId()) == 13) - { - softLeptInJet.push_back( make_pair(dau->pt(), iDau) ); - } - - if (dau->charge() != 0 ) // tracks -> charged - { - float ptBuf = dau->pt(); - if (ptBuf > leadTrackPt) leadTrackPt = ptBuf; - } - // vDau.SetPxPyPzE (dau->px(), dau->py(), dau->pz(), dau->energy()); - // vSum += vDau; - // cout << " - " << iDau << " pdg: " << dau->pdgId() << " pt: " << dau->pt() << " charge = " << dau->charge() << endl; - } - - //cout << " ## LEAD TRACK PT = " << leadTrackPt << endl; - //cout << " ## jet eta: " << ijet->eta() << endl; - _jets_leadTrackPt.push_back(leadTrackPt); - float leptonPtRel = -1.; - float leptonPt = -1.; - float leptonDeltaR = -1.; - int softLeptIdx = -1; - if (softLeptInJet.size() > 0) - { - sort(softLeptInJet.begin(), softLeptInJet.end()); - softLeptIdx = softLeptInJet.back().second; - } - if (softLeptIdx >= 0) - { - const reco::Candidate * dau = ijet->daughter(softLeptIdx); - leptonPtRel = dau->pt() / ijet->pt() ; - leptonPt = dau->pt() ; - leptonDeltaR = deltaR(*dau, *ijet) ; - } - _jets_leptonPtRel .push_back (leptonPtRel); - _jets_leptonPt .push_back (leptonPt); - _jets_leptonDeltaR.push_back (leptonDeltaR); - - //cout << " --> jet pt, eta, phi: " << vJet.Pt() << " " << vJet.Eta() << " " << vJet.Phi() << endl; - //cout << " --> sum pt, eta, phi: " << vSum.Pt() << " " << vSum.Eta() << " " << vSum.Phi() << endl; - //if (abs(ijet->hadronFlavour()) == 5 ) cout << " ------------ THIS WAS A B JET ------------" << endl; - //cout << "RAW pt: " << jetRawPt << " | " << jetRawPt2 << " --> " << vSum.Pt() << endl; - jecUnc->setJetEta(ijet->eta()); - jecUnc->setJetPt(ijet->pt()); // here you must use the CORRECTED jet pt - _jets_jecUnc.push_back(jecUnc->getUncertainty(true)); - } - - return nJets; -} - - -#include -DEFINE_FWK_MODULE(Ntuplizer_noTagAndProbe); - -#endif //NTUPLIZER_NOTAGANDPROBE_H diff --git a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_AOD.cc b/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_AOD.cc deleted file mode 100644 index 8c3c49f5d76..00000000000 --- a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_AOD.cc +++ /dev/null @@ -1,840 +0,0 @@ -#ifndef NTUPLIZER_NOTAGANDPROBE_AOD_H -#define NTUPLIZER_NOTAGANDPROBE_AOD_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - - -#include "FWCore/Framework/interface/EDAnalyzer.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include -#include "DataFormats/PatCandidates/interface/Jet.h" - -#include "DataFormats/L1Trigger/interface/Jet.h" - -#include "FWCore/ServiceRegistry/interface/Service.h" -#include "FWCore/Common/interface/TriggerNames.h" -#include "HLTrigger/HLTcore/interface/HLTConfigProvider.h" -#include "DataFormats/L1Trigger/interface/Tau.h" -#include "DataFormats/VertexReco/interface/Vertex.h" - -#include "FWCore/Framework/interface/EventSetup.h" -#include "JetMETCorrections/Objects/interface/JetCorrector.h" -#include "CondFormats/JetMETObjects/interface/JetCorrectorParameters.h" -#include "CondFormats/JetMETObjects/interface/JetCorrectionUncertainty.h" -#include "JetMETCorrections/Objects/interface/JetCorrectionsRecord.h" - -#include "DataFormats/Common/interface/TriggerResults.h" - -#include "tParameterSet.h" - -#include "CommonTools/UtilAlgos/interface/TFileService.h" -#include - -#include "DataFormats/TauReco/interface/PFTau.h" -#include "DataFormats/TauReco/interface/PFTauFwd.h" -#include 
"DataFormats/TauReco/interface/PFTauDiscriminator.h" - -#include "DataFormats/JetReco/interface/PFJetCollection.h" -#include - - -//Set this variable to decide the number of triggers that you want to check simultaneously -#define NUMBER_OF_MAXIMUM_TRIGGERS 64 - - -/* - ██████ ███████ ██████ ██ █████ ██████ █████ ████████ ██ ██████ ███ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ █████ ██ ██ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██████ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -class Ntuplizer_noTagAndProbe_AOD : public edm::EDAnalyzer { -public: - /// Constructor - explicit Ntuplizer_noTagAndProbe_AOD(const edm::ParameterSet&); - /// Destructor - virtual ~Ntuplizer_noTagAndProbe_AOD(); - -private: - //----edm control--- - virtual void beginJob() ; - virtual void beginRun(edm::Run const&, edm::EventSetup const&); - virtual void analyze(const edm::Event&, const edm::EventSetup&); - virtual void endJob(); - virtual void endRun(edm::Run const&, edm::EventSetup const&); - void Initialize(); - bool hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor); - int FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc); - // int FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc); - - TTree *_tree; - TTree *_triggerNamesTree; - std::string _treeName; - // ------------------------------------- - // variables to be filled in output tree - ULong64_t _indexevents; - Int_t _runNumber; - Int_t _lumi; - unsigned long _tauTriggerBits; - float _tauPt; - float _tauEta; - float _tauPhi; - int _tauCharge; - int _tauDecayMode; - float _hltPt; - float _hltEta; - float _hltPhi; - int _l1tQual; - float _l1tPt; - float _l1tEta; - float _l1tPhi; - int _l1tIso; - int _l1tEmuQual; - float _l1tEmuPt; - float _l1tEmuEta; - float _l1tEmuPhi; - int _l1tEmuIso; - int _l1tEmuNTT; - int _l1tEmuHasEM; - int _l1tEmuIsMerged; - int _l1tEmuTowerIEta; - int _l1tEmuTowerIPhi; - int _l1tEmuRawEt; - int _l1tEmuIsoEt; - std::vector _l1tQualJet; - std::vector _l1tPtJet; - std::vector _l1tEtaJet; - std::vector _l1tPhiJet; - std::vector _l1tIsoJet; - std::vector _l1tTowerIEtaJet; - std::vector _l1tTowerIPhiJet; - std::vector _l1tRawEtJet; - - Bool_t _hasTriggerMuonType; - Bool_t _hasTriggerTauType; - Bool_t _isMatched; - Bool_t _isOS; - int _foundJet; - int _Nvtx; - - - //Jets variables - Int_t _numberOfJets; - std::vector _jets_px; - std::vector _jets_py; - std::vector _jets_pz; - std::vector _jets_e; - std::vector _jets_rawPt; - std::vector _jets_area; - std::vector _jets_mT; - std::vector _jets_PUJetID; - std::vector _jets_PUJetIDupdated; - std::vector _jets_vtxPt; - std::vector _jets_vtxMass; - std::vector _jets_vtx3dL; - std::vector _jets_vtxNtrk; - std::vector _jets_vtx3deL; - std::vector _jets_leadTrackPt; - std::vector _jets_leptonPtRel; - std::vector _jets_leptonPt; - std::vector _jets_leptonDeltaR; - std::vector _jets_chEmEF; - std::vector _jets_chHEF; - std::vector _jets_nEmEF; - std::vector _jets_nHEF; - std::vector _jets_MUF; - std::vector _jets_neMult; - std::vector _jets_chMult; - std::vector _jets_jecUnc; - - std::vector _jets_QGdiscr; - - std::vector _jets_Flavour; // parton flavour - std::vector _jets_HadronFlavour; // hadron flavour - std::vector _jets_genjetIndex; - std::vector _bdiscr; - std::vector _bdiscr2; - std::vector _bdiscr3; - std::vector _jetID; //1=loose, 2=tight, 3=tightlepveto - std::vector 
_jetrawf; - - edm::EDGetTokenT _tauTag; - //edm::EDGetTokenT _triggerObjects; - edm::EDGetTokenT _triggerBits; - edm::EDGetTokenT _L1TauTag ; - edm::EDGetTokenT _L1EmuTauTag ; - edm::EDGetTokenT> _JetTag; - edm::EDGetTokenT> _l1tJetTag; - // edm::EDGetTokenT> _l1tEmuJetTag; - edm::EDGetTokenT> _VtxTag; - - //!Contains the parameters - tVParameterSet _parameters; - - edm::InputTag _processName; - //! Maximum - std::bitset _tauTriggerBitSet; - - - - HLTConfigProvider _hltConfig; - - -}; - -/* - ██ ███ ███ ██████ ██ ███████ ███ ███ ███████ ███ ██ ████████ █████ ████████ ██ ██████ ███ ██ - ██ ████ ████ ██ ██ ██ ██ ████ ████ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ ████ ██ ██████ ██ █████ ██ ████ ██ █████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ███████ ███████ ██ ██ ███████ ██ ████ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -// ----Constructor and Destructor ----- -Ntuplizer_noTagAndProbe_AOD::Ntuplizer_noTagAndProbe_AOD(const edm::ParameterSet& iConfig) : - _tauTag (consumes (iConfig.getParameter("taus"))), - //_triggerObjects (consumes (iConfig.getParameter("triggerSet"))), - _triggerBits (consumes (iConfig.getParameter("triggerResultsLabel"))), - _L1TauTag (consumes (iConfig.getParameter("L1Tau"))), - _L1EmuTauTag (consumes (iConfig.getParameter("L1EmuTau"))), - _JetTag (consumes> (iConfig.getParameter("jetCollection"))), - _l1tJetTag (consumes> (iConfig.getParameter("l1tJetCollection"))), - _VtxTag (consumes> (iConfig.getParameter("Vertexes"))) -{ - this -> _treeName = iConfig.getParameter("treeName"); - this -> _processName = iConfig.getParameter("triggerResultsLabel"); - - TString triggerName; - edm::Service fs; - this -> _triggerNamesTree = fs -> make("triggerNames", "triggerNames"); - this -> _triggerNamesTree -> Branch("triggerNames",&triggerName); - - //Building the trigger arrays - const std::vector& HLTList = iConfig.getParameter > ("triggerList"); - for (const edm::ParameterSet& parameterSet : HLTList) { - tParameterSet pSet; - pSet.hltPath = parameterSet.getParameter("HLT"); - triggerName = pSet.hltPath; - pSet.hltFilters1 = parameterSet.getParameter >("path1"); - pSet.hltFilters2 = parameterSet.getParameter >("path2"); - pSet.leg1 = parameterSet.getParameter("leg1"); - pSet.leg2 = parameterSet.getParameter("leg2"); - this -> _parameters.push_back(pSet); - - this -> _triggerNamesTree -> Fill(); - } - - - this -> Initialize(); - return; -} - -Ntuplizer_noTagAndProbe_AOD::~Ntuplizer_noTagAndProbe_AOD() -{} - -void Ntuplizer_noTagAndProbe_AOD::beginRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - Bool_t changedConfig = false; - - if(!this -> _hltConfig.init(iRun, iSetup, this -> _processName.process(), changedConfig)){ - edm::LogError("HLTMatchingFilter") << "Initialization of HLTConfigProvider failed!!"; - return; - } - - const edm::TriggerNames::Strings& triggerNames = this -> _hltConfig.triggerNames(); - //std::cout << " ===== LOOKING FOR THE PATH INDEXES =====" << std::endl; - for (tParameterSet& parameter : this -> _parameters){ - const std::string& hltPath = parameter.hltPath; - bool found = false; - for(unsigned int j=0; j < triggerNames.size(); j++) - { - //std::cout << triggerNames[j] << std::endl; - if (triggerNames[j].find(hltPath) != std::string::npos) { - found = true; - parameter.hltPathIndex = j; - - //std::cout << "### FOUND AT INDEX #" << j << " --> " << triggerNames[j] << std::endl; - } - } - if (!found) parameter.hltPathIndex = -1; - } - -} - -void 
Ntuplizer_noTagAndProbe_AOD::Initialize() { - this -> _indexevents = 0; - this -> _runNumber = 0; - this -> _lumi = 0; - this -> _tauPt = -1.; - this -> _tauEta = -1.; - this -> _tauPhi = -1.; - this -> _tauCharge = -1; - this -> _tauDecayMode = -1; - this -> _isMatched = false; - this -> _hltPt = -1; - this -> _hltEta = 666; - this -> _hltPhi = 666; - this -> _l1tPt = -1; - this -> _l1tEta = 666; - this -> _l1tPhi = 666; - this -> _l1tQual = -1; - this -> _l1tIso = -1; - this -> _l1tEmuPt = -1; - this -> _l1tEmuEta = 666; - this -> _l1tEmuPhi = 666; - this -> _l1tEmuQual = -1; - this -> _l1tEmuIso = -1; - this -> _l1tEmuNTT = -1; - this -> _l1tEmuHasEM = -1; - this -> _l1tEmuIsMerged = -1; - this -> _l1tEmuTowerIEta = -1; - this -> _l1tEmuTowerIPhi = -1; - this -> _l1tEmuRawEt = -1; - this -> _l1tEmuIsoEt = -1; - this -> _foundJet = 0; - - this -> _l1tPtJet . clear(); - this -> _l1tEtaJet . clear(); - this -> _l1tPhiJet . clear(); - this -> _l1tQualJet . clear(); - this -> _l1tIsoJet . clear(); - this -> _l1tTowerIEtaJet . clear(); - this -> _l1tTowerIPhiJet . clear(); - this -> _l1tRawEtJet . clear(); - - _jets_px.clear(); - _jets_py.clear(); - _jets_pz.clear(); - _jets_e.clear(); - _jets_rawPt.clear(); - _jets_area.clear(); - _jets_mT.clear(); - _jets_PUJetID.clear(); - _jets_PUJetIDupdated.clear(); - _jets_vtxPt.clear(); - _jets_vtxMass.clear(); - _jets_vtx3dL.clear(); - _jets_vtxNtrk.clear(); - _jets_vtx3deL.clear(); - _jets_leadTrackPt.clear(); - _jets_leptonPtRel.clear(); - _jets_leptonPt.clear(); - _jets_leptonDeltaR.clear(); - _jets_chEmEF.clear(); - _jets_chHEF.clear(); - _jets_nEmEF.clear(); - _jets_nHEF.clear(); - _jets_MUF.clear(); - _jets_neMult.clear(); - _jets_chMult.clear(); - _jets_Flavour.clear(); - _jets_HadronFlavour.clear(); - _jets_genjetIndex.clear(); - _jets_jecUnc.clear(); - _jets_QGdiscr.clear(); - _numberOfJets=0; - _bdiscr.clear(); - _bdiscr2.clear(); - _bdiscr3.clear(); - _jetID.clear(); - _jetrawf.clear(); - -} - - -void Ntuplizer_noTagAndProbe_AOD::beginJob() -{ - edm::Service fs; - this -> _tree = fs -> make(this -> _treeName.c_str(), this -> _treeName.c_str()); - - //Branches - this -> _tree -> Branch("EventNumber",&_indexevents,"EventNumber/l"); - this -> _tree -> Branch("RunNumber",&_runNumber,"RunNumber/I"); - this -> _tree -> Branch("lumi",&_lumi,"lumi/I"); - this -> _tree -> Branch("tauTriggerBits", &_tauTriggerBits, "tauTriggerBits/l"); - this -> _tree -> Branch("tauPt", &_tauPt, "tauPt/F"); - this -> _tree -> Branch("tauEta", &_tauEta, "tauEta/F"); - this -> _tree -> Branch("tauPhi", &_tauPhi, "tauPhi/F"); - this -> _tree -> Branch("tauCharge", &_tauCharge, "tauCharge/I"); - this -> _tree -> Branch("tauDecayMode", &_tauDecayMode, "tauDecayMode/I"); - this -> _tree -> Branch("hltPt", &_hltPt, "hltPt/F"); - this -> _tree -> Branch("hltEta", &_hltEta, "hltEta/F"); - this -> _tree -> Branch("hltPhi", &_hltPhi, "hltPhi/F"); - this -> _tree -> Branch("l1tPt", &_l1tPt, "l1tPt/F"); - this -> _tree -> Branch("l1tEta", &_l1tEta, "l1tEta/F"); - this -> _tree -> Branch("l1tPhi", &_l1tPhi, "l1tPhi/F"); - this -> _tree -> Branch("l1tQual", &_l1tQual, "l1tQual/I"); - this -> _tree -> Branch("l1tIso", &_l1tIso, "l1tIso/I"); - this -> _tree -> Branch("l1tEmuPt", &_l1tEmuPt, "l1tEmuPt/F"); - this -> _tree -> Branch("l1tEmuEta", &_l1tEmuEta, "l1tEmuEta/F"); - this -> _tree -> Branch("l1tEmuPhi", &_l1tEmuPhi, "l1tEmuPhi/F"); - this -> _tree -> Branch("l1tEmuQual", &_l1tEmuQual, "l1tEmuQual/I"); - this -> _tree -> Branch("l1tEmuIso", &_l1tEmuIso, "l1tEmuIso/I"); - this 
-> _tree -> Branch("l1tEmuNTT", &_l1tEmuNTT, "l1tEmuNTT/I"); - this -> _tree -> Branch("l1tEmuHasEM", &_l1tEmuHasEM, "l1tEmuHasEM/I"); - this -> _tree -> Branch("l1tEmuIsMerged", &_l1tEmuIsMerged, "l1tEmuIsMerged/I"); - this -> _tree -> Branch("l1tEmuTowerIEta", &_l1tEmuTowerIEta, "l1tEmuTowerIEta/I"); - this -> _tree -> Branch("l1tEmuTowerIPhi", &_l1tEmuTowerIPhi, "l1tEmuTowerIPhi/I"); - this -> _tree -> Branch("l1tEmuRawEt", &_l1tEmuRawEt, "l1tEmuRawEt/I"); - this -> _tree -> Branch("l1tEmuIsoEt", &_l1tEmuIsoEt, "l1tEmuIsoEt/I"); - - this -> _tree -> Branch("l1tPtJet", &_l1tPtJet); - this -> _tree -> Branch("l1tEtaJet", &_l1tEtaJet); - this -> _tree -> Branch("l1tPhiJet", &_l1tPhiJet); - this -> _tree -> Branch("l1tQualJet", &_l1tQualJet); - this -> _tree -> Branch("l1tIsoJet", &_l1tIsoJet); - this -> _tree -> Branch("l1tTowerIEtaJet", &_l1tTowerIEtaJet); - this -> _tree -> Branch("l1tTowerIPhiJet", &_l1tTowerIPhiJet); - this -> _tree -> Branch("l1tRawEtJet", &_l1tRawEtJet); - - this -> _tree -> Branch("hasTriggerMuonType", &_hasTriggerMuonType, "hasTriggerMuonType/O"); - this -> _tree -> Branch("hasTriggerTauType", &_hasTriggerTauType, "hasTriggerTauType/O"); - this -> _tree -> Branch("isMatched", &_isMatched, "isMatched/O"); - this -> _tree -> Branch("isOS", &_isOS, "isOS/O"); - this -> _tree -> Branch("foundJet", &_foundJet, "foundJet/I"); - this -> _tree -> Branch("Nvtx", &_Nvtx, "Nvtx/I"); - - this -> _tree->Branch("JetsNumber",&_numberOfJets,"JetsNumber/I"); - this -> _tree->Branch("jets_px",&_jets_px); - this -> _tree->Branch("jets_py",&_jets_py); - this -> _tree->Branch("jets_pz",&_jets_pz); - this -> _tree->Branch("jets_e",&_jets_e); - this -> _tree->Branch("jets_rawPt", &_jets_rawPt); - this -> _tree->Branch("jets_area", &_jets_area); - this -> _tree->Branch("jets_mT", &_jets_mT); - this -> _tree->Branch("jets_Flavour",&_jets_Flavour); - this -> _tree->Branch("jets_HadronFlavour",&_jets_HadronFlavour); - this -> _tree->Branch("jets_genjetIndex", &_jets_genjetIndex); - this -> _tree->Branch("jets_PUJetID",&_jets_PUJetID); - this -> _tree->Branch("jets_PUJetIDupdated",&_jets_PUJetIDupdated); - this -> _tree->Branch("jets_vtxPt", &_jets_vtxPt); - this -> _tree->Branch("jets_vtxMass", &_jets_vtxMass); - this -> _tree->Branch("jets_vtx3dL", &_jets_vtx3dL); - this -> _tree->Branch("jets_vtxNtrk", &_jets_vtxNtrk); - this -> _tree->Branch("jets_vtx3deL", &_jets_vtx3deL); - this -> _tree->Branch("jets_leadTrackPt", &_jets_leadTrackPt); - this -> _tree->Branch("jets_leptonPtRel", &_jets_leptonPtRel); - this -> _tree->Branch("jets_leptonPt", &_jets_leptonPt); - this -> _tree->Branch("jets_leptonDeltaR", &_jets_leptonDeltaR); - this -> _tree->Branch("jets_chEmEF" , &_jets_chEmEF); - this -> _tree->Branch("jets_chHEF" , &_jets_chHEF); - this -> _tree->Branch("jets_nEmEF" , &_jets_nEmEF); - this -> _tree->Branch("jets_nHEF" , &_jets_nHEF); - this -> _tree->Branch("jets_MUF" , &_jets_MUF); - this -> _tree->Branch("jets_neMult" , &_jets_neMult); - this -> _tree->Branch("jets_chMult" , &_jets_chMult); - this -> _tree->Branch("jets_jecUnc" , &_jets_jecUnc); - - return; -} - - -void Ntuplizer_noTagAndProbe_AOD::endJob() -{ - return; -} - - -void Ntuplizer_noTagAndProbe_AOD::endRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - return; -} - - -void Ntuplizer_noTagAndProbe_AOD::analyze(const edm::Event& iEvent, const edm::EventSetup& eSetup) -{ - this -> Initialize(); - - _indexevents = iEvent.id().event(); - _runNumber = iEvent.id().run(); - _lumi = iEvent.luminosityBlock(); - - 
//cout<<"EventNumber = "<<_indexevents< resultMuon(new pat::MuonRefVector); - - // search for the tag in the event - edm::Handle tauHandle; - //edm::Handle triggerObjects; - edm::Handle triggerBits; - edm::Handle> jetHandle; - edm::Handle> l1tJetHandle; - edm::Handle > vertexes; - - iEvent.getByToken(this -> _tauTag, tauHandle); - //iEvent.getByToken(this -> _triggerObjects, triggerObjects); - iEvent.getByToken(this -> _triggerBits, triggerBits); - iEvent.getByToken(this -> _JetTag, jetHandle); - iEvent.getByToken(this -> _l1tJetTag, l1tJetHandle); - iEvent.getByToken(this -> _VtxTag,vertexes); - - //! TagAndProbe on HLT taus - // const edm::TriggerNames &names = iEvent.triggerNames(*triggerBits); - const reco::PFTau tau = (*tauHandle)[0] ; - // const pat::TauRef tau = (*tauHandle)[0] ; - - this -> _tauTriggerBitSet.reset(); - - - - // for (pat::TriggerObjectStandAlone obj : *triggerObjects) - // { - // const float dR = deltaR (tau.p4(), obj->p4()); - // // const float dR = deltaR (*tau, obj); - // if ( dR < 0.5) - // { - // this -> _isMatched = true; - // this -> _hasTriggerTauType = obj.hasTriggerObjectType(trigger::TriggerTau); - // this -> _hasTriggerMuonType = obj.hasTriggerObjectType(trigger::TriggerMuon); - - // obj.unpackPathNames(names); - // const edm::TriggerNames::Strings& triggerNames = names.triggerNames(); - // //Looking for the path - // unsigned int x = 0; - // bool foundTrigger = false; - // for (const tParameterSet& parameter : this -> _parameters) - // { - // if ((parameter.hltPathIndex >= 0)&&(obj.hasPathName(triggerNames[parameter.hltPathIndex], true, false))) - // { - // foundTrigger = true; - // //Path found, now looking for the label 1, if present in the parameter set - // //std::cout << "==== FOUND PATH " << triggerNames[parameter.hltPathIndex] << " ====" << std::endl; - // //Retrieving filter list for the event - // const std::vector& filters = (parameter.leg1 == 15)? (parameter.hltFilters1):(parameter.hltFilters2); - // if (this -> hasFilters(obj, filters)) - // { - // //std::cout << "#### FOUND TAU WITH HLT PATH " << x << " ####" << std::endl; - // this -> _hltPt = obj.pt(); - // this -> _hltEta = obj.eta(); - // this -> _hltPhi = obj.phi(); - // this -> _tauTriggerBitSet[x] = true; - // //std::cout << this -> _tauTriggerBitSet.to_string() << std::endl; - // } - // } - // x++; - // } - // if (foundTrigger) this -> _foundJet++; - // } - // } - - - //! TagAndProbe on L1T taus - - for(BXVector::const_iterator jet = l1tJetHandle -> begin(0); jet != l1tJetHandle -> end(0) ; jet++) - { - this -> _l1tPtJet . push_back(jet -> pt()); - this -> _l1tEtaJet . push_back(jet -> eta()); - this -> _l1tPhiJet . push_back(jet -> phi()); - this -> _l1tIsoJet . push_back(jet -> hwIso()); - //this -> _l1tNTTJet . push_back(jet -> nTT()); - this -> _l1tQualJet . push_back(jet -> hwQual()); - //this -> _l1tHasEMJet . push_back(jet -> hasEM()); - //this -> _l1tIsMergedJet . push_back(jet -> isMerged()); - this -> _l1tTowerIEtaJet . push_back(jet -> towerIEta()); - this -> _l1tTowerIPhiJet . push_back(jet -> towerIPhi()); - this -> _l1tRawEtJet . push_back(jet -> rawEt()); - //this -> _l1tIsoEtJet . 
push_back(jet -> isoEt()); - } - - edm::Handle< BXVector > L1TauHandle; - iEvent.getByToken(_L1TauTag, L1TauHandle); - - float minDR = 0.5; //Uncomment for new match algo - - //cout<<"ill try this: "<begin(0); bx0TauIt != L1TauHandle->end(0) ; bx0TauIt++) - { - TLorentzVector tauLV; - tauLV.SetPtEtaPhiM(tau.pt(),tau.eta(),tau.phi(),tau.mass()); - TLorentzVector l1tLV; - l1tLV.SetPtEtaPhiM(bx0TauIt->pt(),bx0TauIt->eta(),bx0TauIt->phi(),0.); - const float dR = tauLV.DeltaR(l1tLV); - // const float dR = deltaR(*tau, *bx0TauIt); - const l1t::Tau& l1tTau = *bx0TauIt; - - //dump check - //cout<<"FW Tau, pT = "< _foundJet << " hltPt " << this -> _hltPt << endl; - - this -> _tauTriggerBits = this -> _tauTriggerBitSet.to_ulong(); - //std::cout << "++++++++++ FILL ++++++++++" << std::endl; - - const edm::View* jets = jetHandle.product(); - edm::ESHandle JetCorParColl; - eSetup.get().get("AK4PFchs",JetCorParColl); - JetCorrectorParameters const & JetCorPar = (*JetCorParColl)["Uncertainty"]; - JetCorrectionUncertainty jecUnc (JetCorPar); - _numberOfJets = FillJet(jets,iEvent, &jecUnc); - - - this -> _tree -> Fill(); - -} - -bool Ntuplizer_noTagAndProbe_AOD::hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor) { - - const std::vector& eventLabels = obj.filterLabels(); - for (const std::string& filter : filtersToLookFor) - { - //Looking for matching filters - bool found = false; - for (const std::string& label : eventLabels) - { - //if (label == std::string("hltOverlapFilterIsoMu17MediumIsoPFTau40Reg")) - if (label == filter) - { - - //std::cout << "#### FOUND FILTER " << label << " == " << filter << " ####" << std::endl; - found = true; - } - } - if(!found) return false; - } - - return true; -} - -int Ntuplizer_noTagAndProbe_AOD::FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc){ - int nJets=0; - vector > softLeptInJet; // pt, idx - for(edm::View::const_iterator ijet = jets->begin(); ijet!=jets->end();++ijet){ - nJets++; - _jets_px.push_back( (float) ijet->px()); - _jets_py.push_back( (float) ijet->py()); - _jets_pz.push_back( (float) ijet->pz()); - _jets_e.push_back( (float) ijet->energy()); - _jets_mT.push_back( (float) ijet->mt()); - // _jets_Flavour.push_back(ijet->partonFlavour()); - // _jets_HadronFlavour.push_back(ijet->hadronFlavour()); - // _jets_PUJetID.push_back(ijet->userFloat("pileupJetId:fullDiscriminant")); - // _jets_PUJetIDupdated.push_back(ijet->hasUserFloat("pileupJetIdUpdated:fullDiscriminant") ? ijet->userFloat("pileupJetIdUpdated:fullDiscriminant") : -999); - // float vtxPx = ijet->userFloat ("vtxPx"); - // float vtxPy = ijet->userFloat ("vtxPy"); - // _jets_vtxPt. push_back(TMath::Sqrt(vtxPx*vtxPx + vtxPy*vtxPy)); - // _jets_vtxMass.push_back(ijet->userFloat("vtxMass")); - // _jets_vtx3dL. 
push_back(ijet->userFloat("vtx3DVal")); - // _jets_vtxNtrk.push_back(ijet->userFloat("vtxNtracks")); - // _jets_vtx3deL.push_back(ijet->userFloat("vtx3DSig")); - - // _bdiscr.push_back(ijet->bDiscriminator("pfJetProbabilityBJetTags")); - // _bdiscr2.push_back(ijet->bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags")); - // _bdiscr3.push_back(ijet->bDiscriminator("pfCombinedMVAV2BJetTags")); - - - //PF jet ID - float NHF = 0.; - float NEMF = 0.; - float CHF = 0.; - float MUF = 0.; - float CEMF = 0.; - int NumNeutralParticles = 0; - int chargedMult = 0; - int NumConst = 0; - float CHM = 0.; - // float NHF = ijet->neutralHadronEnergyFraction(); - // float NEMF = ijet->neutralEmEnergyFraction(); - // float CHF = ijet->chargedHadronEnergyFraction(); - // float MUF = ijet->muonEnergyFraction(); - // float CEMF = ijet->chargedEmEnergyFraction(); - // int NumNeutralParticles =ijet->neutralMultiplicity(); - // int chargedMult = ijet->chargedMultiplicity(); - // int NumConst = ijet->chargedMultiplicity()+NumNeutralParticles; - // float CHM = ijet->chargedMultiplicity(); - float absjeta = fabs(ijet->eta()); - - _jets_chEmEF .push_back(CEMF); - _jets_chHEF .push_back(CHF); - _jets_nEmEF .push_back(NEMF); - _jets_nHEF .push_back(NHF); - _jets_chMult .push_back(chargedMult); - _jets_neMult .push_back(NumNeutralParticles); - _jets_MUF .push_back(MUF); - - int jetid=0; - bool looseJetID = false; - bool tightJetID = false; - bool tightLepVetoJetID = false; - - if (absjeta <= 2.7) - { - looseJetID = ( (NHF<0.99 && NEMF<0.99 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightLepVetoJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1 && MUF<0.8) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.90) || absjeta>2.4) ); - } - else if (absjeta <= 3.0) - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>2 ) ; - tightJetID = looseJetID; - } - else - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>10 ); - tightJetID = looseJetID; - } - if (looseJetID) ++jetid; - if (tightJetID) ++jetid; - if (tightLepVetoJetID) ++jetid; - - _jetID.push_back(jetid); - // float jecFactor = ijet->jecFactor("Uncorrected") ; - // float jetRawPt = jecFactor * ijet->pt(); - // //float jetRawPt2 = ijet->pt() / jecFactor; // this is wrong - // _jets_rawPt.push_back ( jetRawPt ); - // _jets_area.push_back (ijet->jetArea()); - // _jetrawf.push_back(jecFactor); - - // loop on jet contituents to retrieve info for b jet regression - int nDau = ijet -> numberOfDaughters(); - //cout << "JET: " << (ijet - jets->begin()) << " N daught: " << nDau << endl; - - // TLorentzVector vJet (0,0,0,0); - // vJet.SetPxPyPzE (ijet->px(), ijet->py(), ijet->pz(), ijet->energy()); - // TLorentzVector vDau (0,0,0,0); - // TLorentzVector vSum (0,0,0,0); - - float leadTrackPt = 0.; - softLeptInJet.clear(); - for (int iDau = 0; iDau < nDau; ++iDau) - { - // pdg id for packed pf candidates meaning is: - // the particle charge and pdgId: 11, 13, 22 for ele/mu/gamma, 211 for charged hadrons, 130 for neutral hadrons, 1 and 2 for hadronic and em particles in HF. 
- const reco::Candidate * dau = ijet->daughter(iDau); - if (abs(dau->pdgId()) == 11 || abs(dau->pdgId()) == 13) - { - softLeptInJet.push_back( make_pair(dau->pt(), iDau) ); - } - - if (dau->charge() != 0 ) // tracks -> charged - { - float ptBuf = dau->pt(); - if (ptBuf > leadTrackPt) leadTrackPt = ptBuf; - } - // vDau.SetPxPyPzE (dau->px(), dau->py(), dau->pz(), dau->energy()); - // vSum += vDau; - // cout << " - " << iDau << " pdg: " << dau->pdgId() << " pt: " << dau->pt() << " charge = " << dau->charge() << endl; - } - - //cout << " ## LEAD TRACK PT = " << leadTrackPt << endl; - //cout << " ## jet eta: " << ijet->eta() << endl; - _jets_leadTrackPt.push_back(leadTrackPt); - float leptonPtRel = -1.; - float leptonPt = -1.; - float leptonDeltaR = -1.; - int softLeptIdx = -1; - if (softLeptInJet.size() > 0) - { - sort(softLeptInJet.begin(), softLeptInJet.end()); - softLeptIdx = softLeptInJet.back().second; - } - if (softLeptIdx >= 0) - { - const reco::Candidate * dau = ijet->daughter(softLeptIdx); - leptonPtRel = dau->pt() / ijet->pt() ; - leptonPt = dau->pt() ; - leptonDeltaR = deltaR(*dau, *ijet) ; - } - _jets_leptonPtRel .push_back (leptonPtRel); - _jets_leptonPt .push_back (leptonPt); - _jets_leptonDeltaR.push_back (leptonDeltaR); - - //cout << " --> jet pt, eta, phi: " << vJet.Pt() << " " << vJet.Eta() << " " << vJet.Phi() << endl; - //cout << " --> sum pt, eta, phi: " << vSum.Pt() << " " << vSum.Eta() << " " << vSum.Phi() << endl; - //if (abs(ijet->hadronFlavour()) == 5 ) cout << " ------------ THIS WAS A B JET ------------" << endl; - //cout << "RAW pt: " << jetRawPt << " | " << jetRawPt2 << " --> " << vSum.Pt() << endl; - jecUnc->setJetEta(ijet->eta()); - jecUnc->setJetPt(ijet->pt()); // here you must use the CORRECTED jet pt - _jets_jecUnc.push_back(jecUnc->getUncertainty(true)); - } - - return nJets; -} - - -#include -DEFINE_FWK_MODULE(Ntuplizer_noTagAndProbe_AOD); - -#endif //NTUPLIZER_NOTAGANDPROBE_AOD_H diff --git a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_multipleTaus.cc b/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_multipleTaus.cc deleted file mode 100644 index aa4b713eb43..00000000000 --- a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_multipleTaus.cc +++ /dev/null @@ -1,716 +0,0 @@ -#ifndef NTUPLIZER_NOTAGANDPROBE_MULTIPLETAUS_H -#define NTUPLIZER_NOTAGANDPROBE_MULTIPLETAUS_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - - -#include "FWCore/Framework/interface/EDAnalyzer.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include -#include "DataFormats/PatCandidates/interface/Jet.h" - -#include "DataFormats/L1Trigger/interface/Jet.h" - -#include "FWCore/ServiceRegistry/interface/Service.h" -#include "FWCore/Common/interface/TriggerNames.h" -#include "HLTrigger/HLTcore/interface/HLTConfigProvider.h" -#include "DataFormats/L1Trigger/interface/Tau.h" -#include "DataFormats/VertexReco/interface/Vertex.h" - -#include "FWCore/Framework/interface/EventSetup.h" -#include "JetMETCorrections/Objects/interface/JetCorrector.h" -#include "CondFormats/JetMETObjects/interface/JetCorrectorParameters.h" -#include "CondFormats/JetMETObjects/interface/JetCorrectionUncertainty.h" -#include "JetMETCorrections/Objects/interface/JetCorrectionsRecord.h" - -#include "DataFormats/Common/interface/TriggerResults.h" - -#include "SimDataFormats/GeneratorProducts/interface/GenEventInfoProduct.h" - - -#include "tParameterSet.h" - -#include 
"CommonTools/UtilAlgos/interface/TFileService.h" -#include - - -//Set this variable to decide the number of triggers that you want to check simultaneously -#define NUMBER_OF_MAXIMUM_TRIGGERS 64 - - -/* - ██████ ███████ ██████ ██ █████ ██████ █████ ████████ ██ ██████ ███ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ █████ ██ ██ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██████ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -class Ntuplizer_noTagAndProbe_multipleTaus : public edm::EDAnalyzer { -public: - /// Constructor - explicit Ntuplizer_noTagAndProbe_multipleTaus(const edm::ParameterSet&); - /// Destructor - virtual ~Ntuplizer_noTagAndProbe_multipleTaus(); - -private: - //----edm control--- - virtual void beginJob() ; - virtual void beginRun(edm::Run const&, edm::EventSetup const&); - virtual void analyze(const edm::Event&, const edm::EventSetup&); - virtual void endJob(); - virtual void endRun(edm::Run const&, edm::EventSetup const&); - void Initialize(); - bool hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor); - int FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc); - - TTree *_tree; - TTree *_triggerNamesTree; - std::string _treeName; - // ------------------------------------- - // variables to be filled in output tree - ULong64_t _indexevents; - Int_t _runNumber; - Int_t _lumi; - float _MC_weight; - unsigned long _tauTriggerBits; - std::vector _tauPt; - std::vector _tauEta; - std::vector _tauPhi; - std::vector _tauCharge; - std::vector _tauDecayMode; - std::vector _hltPt; - std::vector _hltEta; - std::vector _hltPhi; - std::vector _l1tQual; - std::vector _l1tPt; - std::vector _l1tEta; - std::vector _l1tPhi; - std::vector _l1tIso; - int _l1tEmuQual; - float _l1tEmuPt; - float _l1tEmuEta; - float _l1tEmuPhi; - int _l1tEmuIso; - int _l1tEmuNTT; - int _l1tEmuHasEM; - int _l1tEmuIsMerged; - int _l1tEmuTowerIEta; - int _l1tEmuTowerIPhi; - int _l1tEmuRawEt; - int _l1tEmuIsoEt; - std::vector _l1tQualJet; - std::vector _l1tPtJet; - std::vector _l1tEtaJet; - std::vector _l1tPhiJet; - std::vector _l1tIsoJet; - std::vector _l1tTowerIEtaJet; - std::vector _l1tTowerIPhiJet; - std::vector _l1tRawEtJet; - - Bool_t _hasTriggerMuonType; - Bool_t _hasTriggerTauType; - std::vector _isMatched; - Bool_t _isOS; - int _foundJet; - int _Nvtx; - - - //Jets variables - Int_t _numberOfJets; - std::vector _jets_px; - std::vector _jets_py; - std::vector _jets_pz; - std::vector _jets_e; - std::vector _jets_rawPt; - std::vector _jets_area; - std::vector _jets_mT; - std::vector _jets_PUJetID; - std::vector _jets_PUJetIDupdated; - std::vector _jets_vtxPt; - std::vector _jets_vtxMass; - std::vector _jets_vtx3dL; - std::vector _jets_vtxNtrk; - std::vector _jets_vtx3deL; - std::vector _jets_leadTrackPt; - std::vector _jets_leptonPtRel; - std::vector _jets_leptonPt; - std::vector _jets_leptonDeltaR; - std::vector _jets_chEmEF; - std::vector _jets_chHEF; - std::vector _jets_nEmEF; - std::vector _jets_nHEF; - std::vector _jets_MUF; - std::vector _jets_neMult; - std::vector _jets_chMult; - std::vector _jets_jecUnc; - - std::vector _jets_QGdiscr; - - std::vector _jets_Flavour; // parton flavour - std::vector _jets_HadronFlavour; // hadron flavour - std::vector _jets_genjetIndex; - std::vector _bdiscr; - std::vector _bdiscr2; - std::vector _bdiscr3; - std::vector _jetID; //1=loose, 2=tight, 3=tightlepveto - std::vector _jetrawf; - - 
edm::EDGetTokenT _genTag; - edm::EDGetTokenT _tauTag; - edm::EDGetTokenT _triggerObjects; - edm::EDGetTokenT _triggerBits; - edm::EDGetTokenT _L1TauTag ; - edm::EDGetTokenT _L1EmuTauTag ; - edm::EDGetTokenT> _JetTag; - edm::EDGetTokenT> _l1tJetTag; - // edm::EDGetTokenT> _l1tEmuJetTag; - edm::EDGetTokenT> _VtxTag; - - //!Contains the parameters - tVParameterSet _parameters; - - edm::InputTag _processName; - //! Maximum - std::bitset _tauTriggerBitSet; - - - - HLTConfigProvider _hltConfig; - - -}; - -/* - ██ ███ ███ ██████ ██ ███████ ███ ███ ███████ ███ ██ ████████ █████ ████████ ██ ██████ ███ ██ - ██ ████ ████ ██ ██ ██ ██ ████ ████ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ ████ ██ ██████ ██ █████ ██ ████ ██ █████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ███████ ███████ ██ ██ ███████ ██ ████ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -// ----Constructor and Destructor ----- -Ntuplizer_noTagAndProbe_multipleTaus::Ntuplizer_noTagAndProbe_multipleTaus(const edm::ParameterSet& iConfig) : - _genTag (consumes (iConfig.getParameter("genCollection"))), - _tauTag (consumes (iConfig.getParameter("taus"))), - _triggerObjects (consumes (iConfig.getParameter("triggerSet"))), - _triggerBits (consumes (iConfig.getParameter("triggerResultsLabel"))), - _L1TauTag (consumes (iConfig.getParameter("L1Tau"))), - _L1EmuTauTag (consumes (iConfig.getParameter("L1EmuTau"))), -_JetTag (consumes> (iConfig.getParameter("jetCollection"))), -_l1tJetTag (consumes> (iConfig.getParameter("l1tJetCollection"))), - _VtxTag (consumes> (iConfig.getParameter("Vertexes"))) -{ - this -> _treeName = iConfig.getParameter("treeName"); - this -> _processName = iConfig.getParameter("triggerResultsLabel"); - - TString triggerName; - edm::Service fs; - this -> _triggerNamesTree = fs -> make("triggerNames", "triggerNames"); - this -> _triggerNamesTree -> Branch("triggerNames",&triggerName); - - //Building the trigger arrays - const std::vector& HLTList = iConfig.getParameter > ("triggerList"); - for (const edm::ParameterSet& parameterSet : HLTList) { - tParameterSet pSet; - pSet.hltPath = parameterSet.getParameter("HLT"); - triggerName = pSet.hltPath; - pSet.hltFilters1 = parameterSet.getParameter >("path1"); - pSet.hltFilters2 = parameterSet.getParameter >("path2"); - pSet.leg1 = parameterSet.getParameter("leg1"); - pSet.leg2 = parameterSet.getParameter("leg2"); - this -> _parameters.push_back(pSet); - - this -> _triggerNamesTree -> Fill(); - } - - - this -> Initialize(); - return; -} - -Ntuplizer_noTagAndProbe_multipleTaus::~Ntuplizer_noTagAndProbe_multipleTaus() -{} - -void Ntuplizer_noTagAndProbe_multipleTaus::beginRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - Bool_t changedConfig = false; - - if(!this -> _hltConfig.init(iRun, iSetup, this -> _processName.process(), changedConfig)){ - edm::LogError("HLTMatchingFilter") << "Initialization of HLTConfigProvider failed!!"; - return; - } - - const edm::TriggerNames::Strings& triggerNames = this -> _hltConfig.triggerNames(); - //std::cout << " ===== LOOKING FOR THE PATH INDEXES =====" << std::endl; - for (tParameterSet& parameter : this -> _parameters){ - const std::string& hltPath = parameter.hltPath; - bool found = false; - for(unsigned int j=0; j < triggerNames.size(); j++) - { - //std::cout << triggerNames[j] << std::endl; - if (triggerNames[j].find(hltPath) != std::string::npos) { - found = true; - parameter.hltPathIndex = j; - - //std::cout << "### FOUND AT INDEX #" << j << " --> " << 
triggerNames[j] << std::endl; - } - } - if (!found) parameter.hltPathIndex = -1; - } - -} - -void Ntuplizer_noTagAndProbe_multipleTaus::Initialize() { - this -> _indexevents = 0; - this -> _runNumber = 0; - this -> _lumi = 0; - this -> _MC_weight = 1; - this -> _tauPt.clear(); - this -> _tauEta.clear(); - this -> _tauPhi.clear(); - this -> _tauCharge.clear(); - this -> _tauDecayMode.clear(); - this -> _isMatched.clear(); - this -> _hltPt.clear(); - this -> _hltEta.clear(); - this -> _hltPhi.clear(); - this -> _l1tPt.clear(); - this -> _l1tEta.clear(); - this -> _l1tPhi.clear(); - this -> _l1tQual.clear(); - this -> _l1tIso.clear(); - this -> _l1tEmuPt = -1; - this -> _l1tEmuEta = 666; - this -> _l1tEmuPhi = 666; - this -> _l1tEmuQual = -1; - this -> _l1tEmuIso = -1; - this -> _l1tEmuNTT = -1; - this -> _l1tEmuHasEM = -1; - this -> _l1tEmuIsMerged = -1; - this -> _l1tEmuTowerIEta = -1; - this -> _l1tEmuTowerIPhi = -1; - this -> _l1tEmuRawEt = -1; - this -> _l1tEmuIsoEt = -1; - this -> _foundJet = 0; - - this -> _l1tPtJet . clear(); - this -> _l1tEtaJet . clear(); - this -> _l1tPhiJet . clear(); - this -> _l1tQualJet . clear(); - this -> _l1tIsoJet . clear(); - this -> _l1tTowerIEtaJet . clear(); - this -> _l1tTowerIPhiJet . clear(); - this -> _l1tRawEtJet . clear(); - - _jets_px.clear(); - _jets_py.clear(); - _jets_pz.clear(); - _jets_e.clear(); - _jets_rawPt.clear(); - _jets_area.clear(); - _jets_mT.clear(); - _jets_PUJetID.clear(); - _jets_PUJetIDupdated.clear(); - _jets_vtxPt.clear(); - _jets_vtxMass.clear(); - _jets_vtx3dL.clear(); - _jets_vtxNtrk.clear(); - _jets_vtx3deL.clear(); - _jets_leadTrackPt.clear(); - _jets_leptonPtRel.clear(); - _jets_leptonPt.clear(); - _jets_leptonDeltaR.clear(); - _jets_chEmEF.clear(); - _jets_chHEF.clear(); - _jets_nEmEF.clear(); - _jets_nHEF.clear(); - _jets_MUF.clear(); - _jets_neMult.clear(); - _jets_chMult.clear(); - _jets_Flavour.clear(); - _jets_HadronFlavour.clear(); - _jets_genjetIndex.clear(); - _jets_jecUnc.clear(); - _jets_QGdiscr.clear(); - _numberOfJets=0; - _bdiscr.clear(); - _bdiscr2.clear(); - _bdiscr3.clear(); - _jetID.clear(); - _jetrawf.clear(); - -} - - -void Ntuplizer_noTagAndProbe_multipleTaus::beginJob() -{ - edm::Service fs; - this -> _tree = fs -> make(this -> _treeName.c_str(), this -> _treeName.c_str()); - - //Branches - this -> _tree -> Branch("EventNumber",&_indexevents,"EventNumber/l"); - this -> _tree -> Branch("RunNumber",&_runNumber,"RunNumber/I"); - this -> _tree -> Branch("lumi",&_lumi,"lumi/I"); - this -> _tree -> Branch("MC_weight",&_MC_weight,"MC_weight/F"); - this -> _tree -> Branch("tauTriggerBits", &_tauTriggerBits, "tauTriggerBits/l"); - this -> _tree -> Branch("tauPt", &_tauPt); - this -> _tree -> Branch("tauEta", &_tauEta); - this -> _tree -> Branch("tauPhi", &_tauPhi); - this -> _tree -> Branch("tauCharge", &_tauCharge); - this -> _tree -> Branch("tauDecayMode", &_tauDecayMode); - this -> _tree -> Branch("hltPt", &_hltPt); - this -> _tree -> Branch("hltEta", &_hltEta); - this -> _tree -> Branch("hltPhi", &_hltPhi); - this -> _tree -> Branch("l1tPt", &_l1tPt); - this -> _tree -> Branch("l1tEta", &_l1tEta); - this -> _tree -> Branch("l1tPhi", &_l1tPhi); - this -> _tree -> Branch("l1tQual", &_l1tQual); - this -> _tree -> Branch("l1tIso", &_l1tIso); - this -> _tree -> Branch("l1tEmuPt", &_l1tEmuPt, "l1tEmuPt/F"); - this -> _tree -> Branch("l1tEmuEta", &_l1tEmuEta, "l1tEmuEta/F"); - this -> _tree -> Branch("l1tEmuPhi", &_l1tEmuPhi, "l1tEmuPhi/F"); - this -> _tree -> Branch("l1tEmuQual", &_l1tEmuQual, 
"l1tEmuQual/I"); - this -> _tree -> Branch("l1tEmuIso", &_l1tEmuIso, "l1tEmuIso/I"); - this -> _tree -> Branch("l1tEmuNTT", &_l1tEmuNTT, "l1tEmuNTT/I"); - this -> _tree -> Branch("l1tEmuHasEM", &_l1tEmuHasEM, "l1tEmuHasEM/I"); - this -> _tree -> Branch("l1tEmuIsMerged", &_l1tEmuIsMerged, "l1tEmuIsMerged/I"); - this -> _tree -> Branch("l1tEmuTowerIEta", &_l1tEmuTowerIEta, "l1tEmuTowerIEta/I"); - this -> _tree -> Branch("l1tEmuTowerIPhi", &_l1tEmuTowerIPhi, "l1tEmuTowerIPhi/I"); - this -> _tree -> Branch("l1tEmuRawEt", &_l1tEmuRawEt, "l1tEmuRawEt/I"); - this -> _tree -> Branch("l1tEmuIsoEt", &_l1tEmuIsoEt, "l1tEmuIsoEt/I"); - - this -> _tree -> Branch("l1tPtJet", &_l1tPtJet); - this -> _tree -> Branch("l1tEtaJet", &_l1tEtaJet); - this -> _tree -> Branch("l1tPhiJet", &_l1tPhiJet); - this -> _tree -> Branch("l1tQualJet", &_l1tQualJet); - this -> _tree -> Branch("l1tIsoJet", &_l1tIsoJet); - this -> _tree -> Branch("l1tTowerIEtaJet", &_l1tTowerIEtaJet); - this -> _tree -> Branch("l1tTowerIPhiJet", &_l1tTowerIPhiJet); - this -> _tree -> Branch("l1tRawEtJet", &_l1tRawEtJet); - - this -> _tree -> Branch("hasTriggerMuonType", &_hasTriggerMuonType, "hasTriggerMuonType/O"); - this -> _tree -> Branch("hasTriggerTauType", &_hasTriggerTauType, "hasTriggerTauType/O"); - this -> _tree -> Branch("isMatched", &_isMatched); - // this -> _tree -> Branch("isMatched", &_isMatched, "isMatched/O"); - this -> _tree -> Branch("isOS", &_isOS, "isOS/O"); - this -> _tree -> Branch("foundJet", &_foundJet, "foundJet/I"); - this -> _tree -> Branch("Nvtx", &_Nvtx, "Nvtx/I"); - - this -> _tree->Branch("JetsNumber",&_numberOfJets,"JetsNumber/I"); - this -> _tree->Branch("jets_px",&_jets_px); - this -> _tree->Branch("jets_py",&_jets_py); - this -> _tree->Branch("jets_pz",&_jets_pz); - this -> _tree->Branch("jets_e",&_jets_e); - this -> _tree->Branch("jets_rawPt", &_jets_rawPt); - this -> _tree->Branch("jets_area", &_jets_area); - this -> _tree->Branch("jets_mT", &_jets_mT); - this -> _tree->Branch("jets_Flavour",&_jets_Flavour); - this -> _tree->Branch("jets_HadronFlavour",&_jets_HadronFlavour); - this -> _tree->Branch("jets_genjetIndex", &_jets_genjetIndex); - this -> _tree->Branch("jets_PUJetID",&_jets_PUJetID); - this -> _tree->Branch("jets_PUJetIDupdated",&_jets_PUJetIDupdated); - this -> _tree->Branch("jets_vtxPt", &_jets_vtxPt); - this -> _tree->Branch("jets_vtxMass", &_jets_vtxMass); - this -> _tree->Branch("jets_vtx3dL", &_jets_vtx3dL); - this -> _tree->Branch("jets_vtxNtrk", &_jets_vtxNtrk); - this -> _tree->Branch("jets_vtx3deL", &_jets_vtx3deL); - this -> _tree->Branch("jets_leadTrackPt", &_jets_leadTrackPt); - this -> _tree->Branch("jets_leptonPtRel", &_jets_leptonPtRel); - this -> _tree->Branch("jets_leptonPt", &_jets_leptonPt); - this -> _tree->Branch("jets_leptonDeltaR", &_jets_leptonDeltaR); - this -> _tree->Branch("jets_chEmEF" , &_jets_chEmEF); - this -> _tree->Branch("jets_chHEF" , &_jets_chHEF); - this -> _tree->Branch("jets_nEmEF" , &_jets_nEmEF); - this -> _tree->Branch("jets_nHEF" , &_jets_nHEF); - this -> _tree->Branch("jets_MUF" , &_jets_MUF); - this -> _tree->Branch("jets_neMult" , &_jets_neMult); - this -> _tree->Branch("jets_chMult" , &_jets_chMult); - this -> _tree->Branch("jets_jecUnc" , &_jets_jecUnc); - - return; -} - - -void Ntuplizer_noTagAndProbe_multipleTaus::endJob() -{ - return; -} - - -void Ntuplizer_noTagAndProbe_multipleTaus::endRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - return; -} - - -void Ntuplizer_noTagAndProbe_multipleTaus::analyze(const edm::Event& iEvent, 
const edm::EventSetup& eSetup) -{ - this -> Initialize(); - - _indexevents = iEvent.id().event(); - _runNumber = iEvent.id().run(); - _lumi = iEvent.luminosityBlock(); - - edm::Handle genEvt; - try {iEvent.getByToken(_genTag, genEvt);} catch (...) {;} - if(genEvt.isValid()) this->_MC_weight = genEvt->weight(); - - //cout<<"EventNumber = "<<_indexevents< resultMuon(new pat::MuonRefVector); - - // search for the tag in the event - edm::Handle tauHandle; - edm::Handle triggerObjects; - edm::Handle triggerBits; - edm::Handle> jetHandle; - edm::Handle> l1tJetHandle; - edm::Handle > vertexes; - - iEvent.getByToken(this -> _tauTag, tauHandle); - iEvent.getByToken(this -> _triggerObjects, triggerObjects); - iEvent.getByToken(this -> _triggerBits, triggerBits); - iEvent.getByToken(this -> _JetTag, jetHandle); - iEvent.getByToken(this -> _l1tJetTag, l1tJetHandle); - iEvent.getByToken(this -> _VtxTag,vertexes); - - for(BXVector::const_iterator jet = l1tJetHandle -> begin(0); jet != l1tJetHandle -> end(0) ; jet++) - { - this -> _l1tPtJet . push_back(jet -> pt()); - this -> _l1tEtaJet . push_back(jet -> eta()); - this -> _l1tPhiJet . push_back(jet -> phi()); - this -> _l1tIsoJet . push_back(jet -> hwIso()); - //this -> _l1tNTTJet . push_back(jet -> nTT()); - this -> _l1tQualJet . push_back(jet -> hwQual()); - //this -> _l1tHasEMJet . push_back(jet -> hasEM()); - //this -> _l1tIsMergedJet . push_back(jet -> isMerged()); - this -> _l1tTowerIEtaJet . push_back(jet -> towerIEta()); - this -> _l1tTowerIPhiJet . push_back(jet -> towerIPhi()); - this -> _l1tRawEtJet . push_back(jet -> rawEt()); - //this -> _l1tIsoEtJet . push_back(jet -> isoEt()); - } - - edm::Handle< BXVector > L1TauHandle; - iEvent.getByToken(_L1TauTag, L1TauHandle); - - for (l1t::TauBxCollection::const_iterator bx0TauIt = L1TauHandle->begin(0); bx0TauIt != L1TauHandle->end(0) ; bx0TauIt++) - { - this -> _l1tPt .push_back (bx0TauIt->pt()); - this -> _l1tEta .push_back (bx0TauIt->eta()); - this -> _l1tPhi .push_back (bx0TauIt->phi()); - this -> _l1tIso .push_back (bx0TauIt->hwIso()); - this -> _l1tQual .push_back (bx0TauIt->hwQual()); - } - - for(UInt_t iTau = 0 ; iTau < tauHandle->size() ; ++iTau) - { - const pat::TauRef tau = (*tauHandle)[iTau] ; - this -> _tauPt.push_back( tau -> pt() ); - this -> _tauEta.push_back( tau -> eta() ); - this -> _tauPhi.push_back( tau -> phi() ); - this -> _tauCharge.push_back( tau -> charge() ); - this -> _tauDecayMode.push_back( tau -> decayMode() ); - } - - this -> _Nvtx = vertexes->size(); - - this -> _tauTriggerBits = this -> _tauTriggerBitSet.to_ulong(); - - const edm::View* jets = jetHandle.product(); - edm::ESHandle JetCorParColl; - eSetup.get().get("AK4PFchs",JetCorParColl); - JetCorrectorParameters const & JetCorPar = (*JetCorParColl)["Uncertainty"]; - JetCorrectionUncertainty jecUnc (JetCorPar); - _numberOfJets = FillJet(jets,iEvent, &jecUnc); - - - this -> _tree -> Fill(); - -} - -bool Ntuplizer_noTagAndProbe_multipleTaus::hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor) { - - const std::vector& eventLabels = obj.filterLabels(); - for (const std::string& filter : filtersToLookFor) - { - //Looking for matching filters - bool found = false; - for (const std::string& label : eventLabels) - { - if (label == filter) - { - found = true; - } - } - if(!found) return false; - } - - return true; -} - -int Ntuplizer_noTagAndProbe_multipleTaus::FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc){ - int nJets=0; - vector > 
softLeptInJet; // pt, idx - for(edm::View::const_iterator ijet = jets->begin(); ijet!=jets->end();++ijet){ - nJets++; - _jets_px.push_back( (float) ijet->px()); - _jets_py.push_back( (float) ijet->py()); - _jets_pz.push_back( (float) ijet->pz()); - _jets_e.push_back( (float) ijet->energy()); - _jets_mT.push_back( (float) ijet->mt()); - _jets_Flavour.push_back(ijet->partonFlavour()); - _jets_HadronFlavour.push_back(ijet->hadronFlavour()); - _jets_PUJetID.push_back(ijet->userFloat("pileupJetId:fullDiscriminant")); - _jets_PUJetIDupdated.push_back(ijet->hasUserFloat("pileupJetIdUpdated:fullDiscriminant") ? ijet->userFloat("pileupJetIdUpdated:fullDiscriminant") : -999); - float vtxPx = ijet->userFloat ("vtxPx"); - float vtxPy = ijet->userFloat ("vtxPy"); - _jets_vtxPt. push_back(TMath::Sqrt(vtxPx*vtxPx + vtxPy*vtxPy)); - _jets_vtxMass.push_back(ijet->userFloat("vtxMass")); - _jets_vtx3dL. push_back(ijet->userFloat("vtx3DVal")); - _jets_vtxNtrk.push_back(ijet->userFloat("vtxNtracks")); - _jets_vtx3deL.push_back(ijet->userFloat("vtx3DSig")); - - _bdiscr.push_back(ijet->bDiscriminator("pfJetProbabilityBJetTags")); - _bdiscr2.push_back(ijet->bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags")); - _bdiscr3.push_back(ijet->bDiscriminator("pfCombinedMVAV2BJetTags")); - - - //PF jet ID - float NHF = ijet->neutralHadronEnergyFraction(); - float NEMF = ijet->neutralEmEnergyFraction(); - float CHF = ijet->chargedHadronEnergyFraction(); - float MUF = ijet->muonEnergyFraction(); - float CEMF = ijet->chargedEmEnergyFraction(); - int NumNeutralParticles =ijet->neutralMultiplicity(); - int chargedMult = ijet->chargedMultiplicity(); - int NumConst = ijet->chargedMultiplicity()+NumNeutralParticles; - float CHM = ijet->chargedMultiplicity(); - float absjeta = fabs(ijet->eta()); - - _jets_chEmEF .push_back(CEMF); - _jets_chHEF .push_back(CHF); - _jets_nEmEF .push_back(NEMF); - _jets_nHEF .push_back(NHF); - _jets_chMult .push_back(chargedMult); - _jets_neMult .push_back(NumNeutralParticles); - _jets_MUF .push_back(MUF); - - int jetid=0; - bool looseJetID = false; - bool tightJetID = false; - bool tightLepVetoJetID = false; - - if (absjeta <= 2.7) - { - looseJetID = ( (NHF<0.99 && NEMF<0.99 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightLepVetoJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1 && MUF<0.8) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.90) || absjeta>2.4) ); - } - else if (absjeta <= 3.0) - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>2 ) ; - tightJetID = looseJetID; - } - else - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>10 ); - tightJetID = looseJetID; - } - if (looseJetID) ++jetid; - if (tightJetID) ++jetid; - if (tightLepVetoJetID) ++jetid; - - _jetID.push_back(jetid); - float jecFactor = ijet->jecFactor("Uncorrected") ; - float jetRawPt = jecFactor * ijet->pt(); - //float jetRawPt2 = ijet->pt() / jecFactor; // this is wrong - _jets_rawPt.push_back ( jetRawPt ); - _jets_area.push_back (ijet->jetArea()); - _jetrawf.push_back(jecFactor); - - // loop on jet contituents to retrieve info for b jet regression - int nDau = ijet -> numberOfDaughters(); - //cout << "JET: " << (ijet - jets->begin()) << " N daught: " << nDau << endl; - - // TLorentzVector vJet (0,0,0,0); - // vJet.SetPxPyPzE (ijet->px(), ijet->py(), ijet->pz(), ijet->energy()); - // TLorentzVector vDau (0,0,0,0); - // TLorentzVector vSum (0,0,0,0); - - 
float leadTrackPt = 0.; - softLeptInJet.clear(); - for (int iDau = 0; iDau < nDau; ++iDau) - { - // pdg id for packed pf candidates meaning is: - // the particle charge and pdgId: 11, 13, 22 for ele/mu/gamma, 211 for charged hadrons, 130 for neutral hadrons, 1 and 2 for hadronic and em particles in HF. - const reco::Candidate * dau = ijet->daughter(iDau); - if (abs(dau->pdgId()) == 11 || abs(dau->pdgId()) == 13) - { - softLeptInJet.push_back( make_pair(dau->pt(), iDau) ); - } - - if (dau->charge() != 0 ) // tracks -> charged - { - float ptBuf = dau->pt(); - if (ptBuf > leadTrackPt) leadTrackPt = ptBuf; - } - // vDau.SetPxPyPzE (dau->px(), dau->py(), dau->pz(), dau->energy()); - // vSum += vDau; - // cout << " - " << iDau << " pdg: " << dau->pdgId() << " pt: " << dau->pt() << " charge = " << dau->charge() << endl; - } - - //cout << " ## LEAD TRACK PT = " << leadTrackPt << endl; - //cout << " ## jet eta: " << ijet->eta() << endl; - _jets_leadTrackPt.push_back(leadTrackPt); - float leptonPtRel = -1.; - float leptonPt = -1.; - float leptonDeltaR = -1.; - int softLeptIdx = -1; - if (softLeptInJet.size() > 0) - { - sort(softLeptInJet.begin(), softLeptInJet.end()); - softLeptIdx = softLeptInJet.back().second; - } - if (softLeptIdx >= 0) - { - const reco::Candidate * dau = ijet->daughter(softLeptIdx); - leptonPtRel = dau->pt() / ijet->pt() ; - leptonPt = dau->pt() ; - leptonDeltaR = deltaR(*dau, *ijet) ; - } - _jets_leptonPtRel .push_back (leptonPtRel); - _jets_leptonPt .push_back (leptonPt); - _jets_leptonDeltaR.push_back (leptonDeltaR); - - //cout << " --> jet pt, eta, phi: " << vJet.Pt() << " " << vJet.Eta() << " " << vJet.Phi() << endl; - //cout << " --> sum pt, eta, phi: " << vSum.Pt() << " " << vSum.Eta() << " " << vSum.Phi() << endl; - //if (abs(ijet->hadronFlavour()) == 5 ) cout << " ------------ THIS WAS A B JET ------------" << endl; - //cout << "RAW pt: " << jetRawPt << " | " << jetRawPt2 << " --> " << vSum.Pt() << endl; - jecUnc->setJetEta(ijet->eta()); - jecUnc->setJetPt(ijet->pt()); // here you must use the CORRECTED jet pt - _jets_jecUnc.push_back(jecUnc->getUncertainty(true)); - } - - return nJets; -} - - -#include -DEFINE_FWK_MODULE(Ntuplizer_noTagAndProbe_multipleTaus); - -#endif //NTUPLIZER_NOTAGANDPROBE_MULTIPLETAUS_H diff --git a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_multipleTaus_AOD.cc b/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_multipleTaus_AOD.cc deleted file mode 100644 index 867328636aa..00000000000 --- a/TauTagAndProbe/plugins/Ntuplizer_noTagAndProbe_multipleTaus_AOD.cc +++ /dev/null @@ -1,729 +0,0 @@ -#ifndef NTUPLIZER_NOTAGANDPROBE_MULTIPLETAUS_AOD_H -#define NTUPLIZER_NOTAGANDPROBE_MULTIPLETAUS_AOD_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - - -#include "FWCore/Framework/interface/EDAnalyzer.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include -#include "DataFormats/PatCandidates/interface/Jet.h" - -#include "DataFormats/L1Trigger/interface/Jet.h" - -#include "FWCore/ServiceRegistry/interface/Service.h" -#include "FWCore/Common/interface/TriggerNames.h" -#include "HLTrigger/HLTcore/interface/HLTConfigProvider.h" -#include "DataFormats/L1Trigger/interface/Tau.h" -#include "DataFormats/VertexReco/interface/Vertex.h" - -#include "FWCore/Framework/interface/EventSetup.h" -#include "JetMETCorrections/Objects/interface/JetCorrector.h" -#include "CondFormats/JetMETObjects/interface/JetCorrectorParameters.h" -#include 
"CondFormats/JetMETObjects/interface/JetCorrectionUncertainty.h" -#include "JetMETCorrections/Objects/interface/JetCorrectionsRecord.h" - -#include "DataFormats/Common/interface/TriggerResults.h" - -#include "tParameterSet.h" - -#include "CommonTools/UtilAlgos/interface/TFileService.h" -#include - -#include "DataFormats/TauReco/interface/PFTau.h" -#include "DataFormats/TauReco/interface/PFTauFwd.h" -#include "DataFormats/TauReco/interface/PFTauDiscriminator.h" - -#include "DataFormats/JetReco/interface/PFJetCollection.h" -#include - - -//Set this variable to decide the number of triggers that you want to check simultaneously -#define NUMBER_OF_MAXIMUM_TRIGGERS 64 - - -/* - ██████ ███████ ██████ ██ █████ ██████ █████ ████████ ██ ██████ ███ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ █████ ██ ██ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██████ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -class Ntuplizer_noTagAndProbe_multipleTaus_AOD : public edm::EDAnalyzer { -public: - /// Constructor - explicit Ntuplizer_noTagAndProbe_multipleTaus_AOD(const edm::ParameterSet&); - /// Destructor - virtual ~Ntuplizer_noTagAndProbe_multipleTaus_AOD(); - -private: - //----edm control--- - virtual void beginJob() ; - virtual void beginRun(edm::Run const&, edm::EventSetup const&); - virtual void analyze(const edm::Event&, const edm::EventSetup&); - virtual void endJob(); - virtual void endRun(edm::Run const&, edm::EventSetup const&); - void Initialize(); - bool hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor); - int FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc); - // int FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc); - - TTree *_tree; - TTree *_triggerNamesTree; - std::string _treeName; - // ------------------------------------- - // variables to be filled in output tree - ULong64_t _indexevents; - Int_t _runNumber; - Int_t _lumi; - unsigned long _tauTriggerBits; - std::vector _tauPt; - std::vector _tauEta; - std::vector _tauPhi; - std::vector _tauCharge; - std::vector _tauDecayMode; - std::vector _hltPt; - std::vector _hltEta; - std::vector _hltPhi; - std::vector _l1tQual; - std::vector _l1tPt; - std::vector _l1tEta; - std::vector _l1tPhi; - std::vector _l1tIso; - int _l1tEmuQual; - float _l1tEmuPt; - float _l1tEmuEta; - float _l1tEmuPhi; - int _l1tEmuIso; - int _l1tEmuNTT; - int _l1tEmuHasEM; - int _l1tEmuIsMerged; - int _l1tEmuTowerIEta; - int _l1tEmuTowerIPhi; - int _l1tEmuRawEt; - int _l1tEmuIsoEt; - std::vector _l1tQualJet; - std::vector _l1tPtJet; - std::vector _l1tEtaJet; - std::vector _l1tPhiJet; - std::vector _l1tIsoJet; - std::vector _l1tTowerIEtaJet; - std::vector _l1tTowerIPhiJet; - std::vector _l1tRawEtJet; - - Bool_t _hasTriggerMuonType; - Bool_t _hasTriggerTauType; - std::vector _isMatched; - Bool_t _isOS; - int _foundJet; - int _Nvtx; - - - //Jets variables - Int_t _numberOfJets; - std::vector _jets_px; - std::vector _jets_py; - std::vector _jets_pz; - std::vector _jets_e; - std::vector _jets_rawPt; - std::vector _jets_area; - std::vector _jets_mT; - std::vector _jets_PUJetID; - std::vector _jets_PUJetIDupdated; - std::vector _jets_vtxPt; - std::vector _jets_vtxMass; - std::vector _jets_vtx3dL; - std::vector _jets_vtxNtrk; - std::vector _jets_vtx3deL; - std::vector _jets_leadTrackPt; - std::vector _jets_leptonPtRel; - std::vector _jets_leptonPt; - std::vector 
_jets_leptonDeltaR; - std::vector _jets_chEmEF; - std::vector _jets_chHEF; - std::vector _jets_nEmEF; - std::vector _jets_nHEF; - std::vector _jets_MUF; - std::vector _jets_neMult; - std::vector _jets_chMult; - std::vector _jets_jecUnc; - - std::vector _jets_QGdiscr; - - std::vector _jets_Flavour; // parton flavour - std::vector _jets_HadronFlavour; // hadron flavour - std::vector _jets_genjetIndex; - std::vector _bdiscr; - std::vector _bdiscr2; - std::vector _bdiscr3; - std::vector _jetID; //1=loose, 2=tight, 3=tightlepveto - std::vector _jetrawf; - - edm::EDGetTokenT _tauTag; - //edm::EDGetTokenT _triggerObjects; - edm::EDGetTokenT _triggerBits; - edm::EDGetTokenT _L1TauTag ; - edm::EDGetTokenT _L1EmuTauTag ; - edm::EDGetTokenT> _JetTag; - edm::EDGetTokenT> _l1tJetTag; - // edm::EDGetTokenT> _l1tEmuJetTag; - edm::EDGetTokenT> _VtxTag; - - //!Contains the parameters - tVParameterSet _parameters; - - edm::InputTag _processName; - //! Maximum - std::bitset _tauTriggerBitSet; - - - - HLTConfigProvider _hltConfig; - - -}; - -/* - ██ ███ ███ ██████ ██ ███████ ███ ███ ███████ ███ ██ ████████ █████ ████████ ██ ██████ ███ ██ - ██ ████ ████ ██ ██ ██ ██ ████ ████ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ ████ ██ ██████ ██ █████ ██ ████ ██ █████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ███████ ███████ ██ ██ ███████ ██ ████ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -// ----Constructor and Destructor ----- -Ntuplizer_noTagAndProbe_multipleTaus_AOD::Ntuplizer_noTagAndProbe_multipleTaus_AOD(const edm::ParameterSet& iConfig) : - _tauTag (consumes (iConfig.getParameter("taus"))), - //_triggerObjects (consumes (iConfig.getParameter("triggerSet"))), - _triggerBits (consumes (iConfig.getParameter("triggerResultsLabel"))), - _L1TauTag (consumes (iConfig.getParameter("L1Tau"))), - _L1EmuTauTag (consumes (iConfig.getParameter("L1EmuTau"))), - _JetTag (consumes> (iConfig.getParameter("jetCollection"))), - _l1tJetTag (consumes> (iConfig.getParameter("l1tJetCollection"))), - _VtxTag (consumes> (iConfig.getParameter("Vertexes"))) -{ - this -> _treeName = iConfig.getParameter("treeName"); - this -> _processName = iConfig.getParameter("triggerResultsLabel"); - - TString triggerName; - edm::Service fs; - this -> _triggerNamesTree = fs -> make("triggerNames", "triggerNames"); - this -> _triggerNamesTree -> Branch("triggerNames",&triggerName); - - //Building the trigger arrays - const std::vector& HLTList = iConfig.getParameter > ("triggerList"); - for (const edm::ParameterSet& parameterSet : HLTList) { - tParameterSet pSet; - pSet.hltPath = parameterSet.getParameter("HLT"); - triggerName = pSet.hltPath; - pSet.hltFilters1 = parameterSet.getParameter >("path1"); - pSet.hltFilters2 = parameterSet.getParameter >("path2"); - pSet.leg1 = parameterSet.getParameter("leg1"); - pSet.leg2 = parameterSet.getParameter("leg2"); - this -> _parameters.push_back(pSet); - - this -> _triggerNamesTree -> Fill(); - } - - - this -> Initialize(); - return; -} - -Ntuplizer_noTagAndProbe_multipleTaus_AOD::~Ntuplizer_noTagAndProbe_multipleTaus_AOD() -{} - -void Ntuplizer_noTagAndProbe_multipleTaus_AOD::beginRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - Bool_t changedConfig = false; - - if(!this -> _hltConfig.init(iRun, iSetup, this -> _processName.process(), changedConfig)){ - edm::LogError("HLTMatchingFilter") << "Initialization of HLTConfigProvider failed!!"; - return; - } - - const edm::TriggerNames::Strings& triggerNames = this -> 
_hltConfig.triggerNames(); - //std::cout << " ===== LOOKING FOR THE PATH INDEXES =====" << std::endl; - for (tParameterSet& parameter : this -> _parameters){ - const std::string& hltPath = parameter.hltPath; - bool found = false; - for(unsigned int j=0; j < triggerNames.size(); j++) - { - //std::cout << triggerNames[j] << std::endl; - if (triggerNames[j].find(hltPath) != std::string::npos) { - found = true; - parameter.hltPathIndex = j; - - //std::cout << "### FOUND AT INDEX #" << j << " --> " << triggerNames[j] << std::endl; - } - } - if (!found) parameter.hltPathIndex = -1; - } - -} - -void Ntuplizer_noTagAndProbe_multipleTaus_AOD::Initialize() { - this -> _indexevents = 0; - this -> _runNumber = 0; - this -> _lumi = 0; - this -> _tauPt.clear(); - this -> _tauEta.clear(); - this -> _tauPhi.clear(); - this -> _tauCharge.clear(); - this -> _tauDecayMode.clear(); - this -> _isMatched.clear(); - this -> _hltPt.clear(); - this -> _hltEta.clear(); - this -> _hltPhi.clear(); - this -> _l1tPt.clear(); - this -> _l1tEta.clear(); - this -> _l1tPhi.clear(); - this -> _l1tQual.clear(); - this -> _l1tIso.clear(); - this -> _l1tEmuPt = -1; - this -> _l1tEmuEta = 666; - this -> _l1tEmuPhi = 666; - this -> _l1tEmuQual = -1; - this -> _l1tEmuIso = -1; - this -> _l1tEmuNTT = -1; - this -> _l1tEmuHasEM = -1; - this -> _l1tEmuIsMerged = -1; - this -> _l1tEmuTowerIEta = -1; - this -> _l1tEmuTowerIPhi = -1; - this -> _l1tEmuRawEt = -1; - this -> _l1tEmuIsoEt = -1; - this -> _foundJet = 0; - - this -> _l1tPtJet . clear(); - this -> _l1tEtaJet . clear(); - this -> _l1tPhiJet . clear(); - this -> _l1tQualJet . clear(); - this -> _l1tIsoJet . clear(); - this -> _l1tTowerIEtaJet . clear(); - this -> _l1tTowerIPhiJet . clear(); - this -> _l1tRawEtJet . clear(); - - _jets_px.clear(); - _jets_py.clear(); - _jets_pz.clear(); - _jets_e.clear(); - _jets_rawPt.clear(); - _jets_area.clear(); - _jets_mT.clear(); - _jets_PUJetID.clear(); - _jets_PUJetIDupdated.clear(); - _jets_vtxPt.clear(); - _jets_vtxMass.clear(); - _jets_vtx3dL.clear(); - _jets_vtxNtrk.clear(); - _jets_vtx3deL.clear(); - _jets_leadTrackPt.clear(); - _jets_leptonPtRel.clear(); - _jets_leptonPt.clear(); - _jets_leptonDeltaR.clear(); - _jets_chEmEF.clear(); - _jets_chHEF.clear(); - _jets_nEmEF.clear(); - _jets_nHEF.clear(); - _jets_MUF.clear(); - _jets_neMult.clear(); - _jets_chMult.clear(); - _jets_Flavour.clear(); - _jets_HadronFlavour.clear(); - _jets_genjetIndex.clear(); - _jets_jecUnc.clear(); - _jets_QGdiscr.clear(); - _numberOfJets=0; - _bdiscr.clear(); - _bdiscr2.clear(); - _bdiscr3.clear(); - _jetID.clear(); - _jetrawf.clear(); - -} - - -void Ntuplizer_noTagAndProbe_multipleTaus_AOD::beginJob() -{ - edm::Service fs; - this -> _tree = fs -> make(this -> _treeName.c_str(), this -> _treeName.c_str()); - - //Branches - this -> _tree -> Branch("EventNumber",&_indexevents,"EventNumber/l"); - this -> _tree -> Branch("RunNumber",&_runNumber,"RunNumber/I"); - this -> _tree -> Branch("lumi",&_lumi,"lumi/I"); - this -> _tree -> Branch("tauTriggerBits", &_tauTriggerBits, "tauTriggerBits/l"); - this -> _tree -> Branch("tauPt", &_tauPt); - this -> _tree -> Branch("tauEta", &_tauEta); - this -> _tree -> Branch("tauPhi", &_tauPhi); - this -> _tree -> Branch("tauCharge", &_tauCharge); - this -> _tree -> Branch("tauDecayMode", &_tauDecayMode); - this -> _tree -> Branch("hltPt", &_hltPt); - this -> _tree -> Branch("hltEta", &_hltEta); - this -> _tree -> Branch("hltPhi", &_hltPhi); - this -> _tree -> Branch("l1tPt", &_l1tPt); - this -> _tree -> Branch("l1tEta", 
&_l1tEta); - this -> _tree -> Branch("l1tPhi", &_l1tPhi); - this -> _tree -> Branch("l1tQual", &_l1tQual); - this -> _tree -> Branch("l1tIso", &_l1tIso); - this -> _tree -> Branch("l1tEmuPt", &_l1tEmuPt, "l1tEmuPt/F"); - this -> _tree -> Branch("l1tEmuEta", &_l1tEmuEta, "l1tEmuEta/F"); - this -> _tree -> Branch("l1tEmuPhi", &_l1tEmuPhi, "l1tEmuPhi/F"); - this -> _tree -> Branch("l1tEmuQual", &_l1tEmuQual, "l1tEmuQual/I"); - this -> _tree -> Branch("l1tEmuIso", &_l1tEmuIso, "l1tEmuIso/I"); - this -> _tree -> Branch("l1tEmuNTT", &_l1tEmuNTT, "l1tEmuNTT/I"); - this -> _tree -> Branch("l1tEmuHasEM", &_l1tEmuHasEM, "l1tEmuHasEM/I"); - this -> _tree -> Branch("l1tEmuIsMerged", &_l1tEmuIsMerged, "l1tEmuIsMerged/I"); - this -> _tree -> Branch("l1tEmuTowerIEta", &_l1tEmuTowerIEta, "l1tEmuTowerIEta/I"); - this -> _tree -> Branch("l1tEmuTowerIPhi", &_l1tEmuTowerIPhi, "l1tEmuTowerIPhi/I"); - this -> _tree -> Branch("l1tEmuRawEt", &_l1tEmuRawEt, "l1tEmuRawEt/I"); - this -> _tree -> Branch("l1tEmuIsoEt", &_l1tEmuIsoEt, "l1tEmuIsoEt/I"); - - this -> _tree -> Branch("l1tPtJet", &_l1tPtJet); - this -> _tree -> Branch("l1tEtaJet", &_l1tEtaJet); - this -> _tree -> Branch("l1tPhiJet", &_l1tPhiJet); - this -> _tree -> Branch("l1tQualJet", &_l1tQualJet); - this -> _tree -> Branch("l1tIsoJet", &_l1tIsoJet); - this -> _tree -> Branch("l1tTowerIEtaJet", &_l1tTowerIEtaJet); - this -> _tree -> Branch("l1tTowerIPhiJet", &_l1tTowerIPhiJet); - this -> _tree -> Branch("l1tRawEtJet", &_l1tRawEtJet); - - this -> _tree -> Branch("hasTriggerMuonType", &_hasTriggerMuonType, "hasTriggerMuonType/O"); - this -> _tree -> Branch("hasTriggerTauType", &_hasTriggerTauType, "hasTriggerTauType/O"); - this -> _tree -> Branch("isMatched", &_isMatched); - this -> _tree -> Branch("isOS", &_isOS, "isOS/O"); - this -> _tree -> Branch("foundJet", &_foundJet, "foundJet/I"); - this -> _tree -> Branch("Nvtx", &_Nvtx, "Nvtx/I"); - - this -> _tree->Branch("JetsNumber",&_numberOfJets,"JetsNumber/I"); - this -> _tree->Branch("jets_px",&_jets_px); - this -> _tree->Branch("jets_py",&_jets_py); - this -> _tree->Branch("jets_pz",&_jets_pz); - this -> _tree->Branch("jets_e",&_jets_e); - this -> _tree->Branch("jets_rawPt", &_jets_rawPt); - this -> _tree->Branch("jets_area", &_jets_area); - this -> _tree->Branch("jets_mT", &_jets_mT); - this -> _tree->Branch("jets_Flavour",&_jets_Flavour); - this -> _tree->Branch("jets_HadronFlavour",&_jets_HadronFlavour); - this -> _tree->Branch("jets_genjetIndex", &_jets_genjetIndex); - this -> _tree->Branch("jets_PUJetID",&_jets_PUJetID); - this -> _tree->Branch("jets_PUJetIDupdated",&_jets_PUJetIDupdated); - this -> _tree->Branch("jets_vtxPt", &_jets_vtxPt); - this -> _tree->Branch("jets_vtxMass", &_jets_vtxMass); - this -> _tree->Branch("jets_vtx3dL", &_jets_vtx3dL); - this -> _tree->Branch("jets_vtxNtrk", &_jets_vtxNtrk); - this -> _tree->Branch("jets_vtx3deL", &_jets_vtx3deL); - this -> _tree->Branch("jets_leadTrackPt", &_jets_leadTrackPt); - this -> _tree->Branch("jets_leptonPtRel", &_jets_leptonPtRel); - this -> _tree->Branch("jets_leptonPt", &_jets_leptonPt); - this -> _tree->Branch("jets_leptonDeltaR", &_jets_leptonDeltaR); - this -> _tree->Branch("jets_chEmEF" , &_jets_chEmEF); - this -> _tree->Branch("jets_chHEF" , &_jets_chHEF); - this -> _tree->Branch("jets_nEmEF" , &_jets_nEmEF); - this -> _tree->Branch("jets_nHEF" , &_jets_nHEF); - this -> _tree->Branch("jets_MUF" , &_jets_MUF); - this -> _tree->Branch("jets_neMult" , &_jets_neMult); - this -> _tree->Branch("jets_chMult" , &_jets_chMult); - this -> 
_tree->Branch("jets_jecUnc" , &_jets_jecUnc); - - return; -} - - -void Ntuplizer_noTagAndProbe_multipleTaus_AOD::endJob() -{ - return; -} - - -void Ntuplizer_noTagAndProbe_multipleTaus_AOD::endRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - return; -} - - -void Ntuplizer_noTagAndProbe_multipleTaus_AOD::analyze(const edm::Event& iEvent, const edm::EventSetup& eSetup) -{ - this -> Initialize(); - - _indexevents = iEvent.id().event(); - _runNumber = iEvent.id().run(); - _lumi = iEvent.luminosityBlock(); - - //cout<<"EventNumber = "<<_indexevents< resultMuon(new pat::MuonRefVector); - - // search for the tag in the event - edm::Handle tauHandle; - //edm::Handle triggerObjects; - edm::Handle triggerBits; - edm::Handle> jetHandle; - edm::Handle> l1tJetHandle; - edm::Handle > vertexes; - - iEvent.getByToken(this -> _tauTag, tauHandle); - //iEvent.getByToken(this -> _triggerObjects, triggerObjects); - iEvent.getByToken(this -> _triggerBits, triggerBits); - iEvent.getByToken(this -> _JetTag, jetHandle); - iEvent.getByToken(this -> _l1tJetTag, l1tJetHandle); - iEvent.getByToken(this -> _VtxTag,vertexes); - - //! TagAndProbe on HLT taus - // const edm::TriggerNames &names = iEvent.triggerNames(*triggerBits); - // const reco::PFTau tau = (*tauHandle)[0] ; - // const pat::TauRef tau = (*tauHandle)[0] ; - - for(UInt_t iTau = 0 ; iTau < tauHandle->size() ; ++iTau) - { - const reco::PFTau tau = (*tauHandle)[iTau] ; - - this -> _tauPt.push_back( tau. pt() ); - this -> _tauEta.push_back( tau . eta() ); - this -> _tauPhi.push_back( tau . phi() ); - this -> _tauCharge.push_back( tau . charge() ); - this -> _tauDecayMode.push_back( tau . decayMode() ); - - } - - this -> _tauTriggerBitSet.reset(); - - for(BXVector::const_iterator jet = l1tJetHandle -> begin(0); jet != l1tJetHandle -> end(0) ; jet++) - { - this -> _l1tPtJet . push_back(jet -> pt()); - this -> _l1tEtaJet . push_back(jet -> eta()); - this -> _l1tPhiJet . push_back(jet -> phi()); - this -> _l1tIsoJet . push_back(jet -> hwIso()); - //this -> _l1tNTTJet . push_back(jet -> nTT()); - this -> _l1tQualJet . push_back(jet -> hwQual()); - //this -> _l1tHasEMJet . push_back(jet -> hasEM()); - //this -> _l1tIsMergedJet . push_back(jet -> isMerged()); - this -> _l1tTowerIEtaJet . push_back(jet -> towerIEta()); - this -> _l1tTowerIPhiJet . push_back(jet -> towerIPhi()); - this -> _l1tRawEtJet . push_back(jet -> rawEt()); - //this -> _l1tIsoEtJet . 
push_back(jet -> isoEt()); - } - - edm::Handle< BXVector > L1TauHandle; - iEvent.getByToken(_L1TauTag, L1TauHandle); - - for (l1t::TauBxCollection::const_iterator bx0TauIt = L1TauHandle->begin(0); bx0TauIt != L1TauHandle->end(0) ; bx0TauIt++) - { - this -> _l1tPt .push_back (bx0TauIt->pt()); - this -> _l1tEta .push_back (bx0TauIt->eta()); - this -> _l1tPhi .push_back (bx0TauIt->phi()); - this -> _l1tIso .push_back (bx0TauIt->hwIso()); - this -> _l1tQual .push_back (bx0TauIt->hwQual()); - } - - this -> _Nvtx = vertexes->size(); - - this -> _tauTriggerBits = this -> _tauTriggerBitSet.to_ulong(); - - const edm::View* jets = jetHandle.product(); - edm::ESHandle JetCorParColl; - eSetup.get().get("AK4PFchs",JetCorParColl); - JetCorrectorParameters const & JetCorPar = (*JetCorParColl)["Uncertainty"]; - JetCorrectionUncertainty jecUnc (JetCorPar); - _numberOfJets = FillJet(jets,iEvent, &jecUnc); - - - this -> _tree -> Fill(); - -} - -bool Ntuplizer_noTagAndProbe_multipleTaus_AOD::hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor) { - - const std::vector& eventLabels = obj.filterLabels(); - for (const std::string& filter : filtersToLookFor) - { - //Looking for matching filters - bool found = false; - for (const std::string& label : eventLabels) - { - if (label == filter) - { - found = true; - } - } - if(!found) return false; - } - - return true; -} - -int Ntuplizer_noTagAndProbe_multipleTaus_AOD::FillJet(const edm::View *jets, const edm::Event& event, JetCorrectionUncertainty* jecUnc){ - int nJets=0; - vector > softLeptInJet; // pt, idx - for(edm::View::const_iterator ijet = jets->begin(); ijet!=jets->end();++ijet){ - nJets++; - _jets_px.push_back( (float) ijet->px()); - _jets_py.push_back( (float) ijet->py()); - _jets_pz.push_back( (float) ijet->pz()); - _jets_e.push_back( (float) ijet->energy()); - _jets_mT.push_back( (float) ijet->mt()); - // _jets_Flavour.push_back(ijet->partonFlavour()); - // _jets_HadronFlavour.push_back(ijet->hadronFlavour()); - // _jets_PUJetID.push_back(ijet->userFloat("pileupJetId:fullDiscriminant")); - // _jets_PUJetIDupdated.push_back(ijet->hasUserFloat("pileupJetIdUpdated:fullDiscriminant") ? ijet->userFloat("pileupJetIdUpdated:fullDiscriminant") : -999); - // float vtxPx = ijet->userFloat ("vtxPx"); - // float vtxPy = ijet->userFloat ("vtxPy"); - // _jets_vtxPt. push_back(TMath::Sqrt(vtxPx*vtxPx + vtxPy*vtxPy)); - // _jets_vtxMass.push_back(ijet->userFloat("vtxMass")); - // _jets_vtx3dL. 
push_back(ijet->userFloat("vtx3DVal")); - // _jets_vtxNtrk.push_back(ijet->userFloat("vtxNtracks")); - // _jets_vtx3deL.push_back(ijet->userFloat("vtx3DSig")); - - // _bdiscr.push_back(ijet->bDiscriminator("pfJetProbabilityBJetTags")); - // _bdiscr2.push_back(ijet->bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags")); - // _bdiscr3.push_back(ijet->bDiscriminator("pfCombinedMVAV2BJetTags")); - - - //PF jet ID - float NHF = 0.; - float NEMF = 0.; - float CHF = 0.; - float MUF = 0.; - float CEMF = 0.; - int NumNeutralParticles = 0; - int chargedMult = 0; - int NumConst = 0; - float CHM = 0.; - // float NHF = ijet->neutralHadronEnergyFraction(); - // float NEMF = ijet->neutralEmEnergyFraction(); - // float CHF = ijet->chargedHadronEnergyFraction(); - // float MUF = ijet->muonEnergyFraction(); - // float CEMF = ijet->chargedEmEnergyFraction(); - // int NumNeutralParticles =ijet->neutralMultiplicity(); - // int chargedMult = ijet->chargedMultiplicity(); - // int NumConst = ijet->chargedMultiplicity()+NumNeutralParticles; - // float CHM = ijet->chargedMultiplicity(); - float absjeta = fabs(ijet->eta()); - - _jets_chEmEF .push_back(CEMF); - _jets_chHEF .push_back(CHF); - _jets_nEmEF .push_back(NEMF); - _jets_nHEF .push_back(NHF); - _jets_chMult .push_back(chargedMult); - _jets_neMult .push_back(NumNeutralParticles); - _jets_MUF .push_back(MUF); - - int jetid=0; - bool looseJetID = false; - bool tightJetID = false; - bool tightLepVetoJetID = false; - - if (absjeta <= 2.7) - { - looseJetID = ( (NHF<0.99 && NEMF<0.99 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.99) || absjeta>2.4) ); - tightLepVetoJetID = ( (NHF<0.90 && NEMF<0.90 && NumConst>1 && MUF<0.8) && ((absjeta<=2.4 && CHF>0 && CHM>0 && CEMF<0.90) || absjeta>2.4) ); - } - else if (absjeta <= 3.0) - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>2 ) ; - tightJetID = looseJetID; - } - else - { - looseJetID = (NEMF<0.90 && NumNeutralParticles>10 ); - tightJetID = looseJetID; - } - if (looseJetID) ++jetid; - if (tightJetID) ++jetid; - if (tightLepVetoJetID) ++jetid; - - _jetID.push_back(jetid); - // float jecFactor = ijet->jecFactor("Uncorrected") ; - // float jetRawPt = jecFactor * ijet->pt(); - // //float jetRawPt2 = ijet->pt() / jecFactor; // this is wrong - // _jets_rawPt.push_back ( jetRawPt ); - // _jets_area.push_back (ijet->jetArea()); - // _jetrawf.push_back(jecFactor); - - // loop on jet contituents to retrieve info for b jet regression - int nDau = ijet -> numberOfDaughters(); - //cout << "JET: " << (ijet - jets->begin()) << " N daught: " << nDau << endl; - - // TLorentzVector vJet (0,0,0,0); - // vJet.SetPxPyPzE (ijet->px(), ijet->py(), ijet->pz(), ijet->energy()); - // TLorentzVector vDau (0,0,0,0); - // TLorentzVector vSum (0,0,0,0); - - float leadTrackPt = 0.; - softLeptInJet.clear(); - for (int iDau = 0; iDau < nDau; ++iDau) - { - // pdg id for packed pf candidates meaning is: - // the particle charge and pdgId: 11, 13, 22 for ele/mu/gamma, 211 for charged hadrons, 130 for neutral hadrons, 1 and 2 for hadronic and em particles in HF. 
- const reco::Candidate * dau = ijet->daughter(iDau); - if (abs(dau->pdgId()) == 11 || abs(dau->pdgId()) == 13) - { - softLeptInJet.push_back( make_pair(dau->pt(), iDau) ); - } - - if (dau->charge() != 0 ) // tracks -> charged - { - float ptBuf = dau->pt(); - if (ptBuf > leadTrackPt) leadTrackPt = ptBuf; - } - // vDau.SetPxPyPzE (dau->px(), dau->py(), dau->pz(), dau->energy()); - // vSum += vDau; - // cout << " - " << iDau << " pdg: " << dau->pdgId() << " pt: " << dau->pt() << " charge = " << dau->charge() << endl; - } - - //cout << " ## LEAD TRACK PT = " << leadTrackPt << endl; - //cout << " ## jet eta: " << ijet->eta() << endl; - _jets_leadTrackPt.push_back(leadTrackPt); - float leptonPtRel = -1.; - float leptonPt = -1.; - float leptonDeltaR = -1.; - int softLeptIdx = -1; - if (softLeptInJet.size() > 0) - { - sort(softLeptInJet.begin(), softLeptInJet.end()); - softLeptIdx = softLeptInJet.back().second; - } - if (softLeptIdx >= 0) - { - const reco::Candidate * dau = ijet->daughter(softLeptIdx); - leptonPtRel = dau->pt() / ijet->pt() ; - leptonPt = dau->pt() ; - leptonDeltaR = deltaR(*dau, *ijet) ; - } - _jets_leptonPtRel .push_back (leptonPtRel); - _jets_leptonPt .push_back (leptonPt); - _jets_leptonDeltaR.push_back (leptonDeltaR); - - //cout << " --> jet pt, eta, phi: " << vJet.Pt() << " " << vJet.Eta() << " " << vJet.Phi() << endl; - //cout << " --> sum pt, eta, phi: " << vSum.Pt() << " " << vSum.Eta() << " " << vSum.Phi() << endl; - //if (abs(ijet->hadronFlavour()) == 5 ) cout << " ------------ THIS WAS A B JET ------------" << endl; - //cout << "RAW pt: " << jetRawPt << " | " << jetRawPt2 << " --> " << vSum.Pt() << endl; - jecUnc->setJetEta(ijet->eta()); - jecUnc->setJetPt(ijet->pt()); // here you must use the CORRECTED jet pt - _jets_jecUnc.push_back(jecUnc->getUncertainty(true)); - } - - return nJets; -} - - -#include -DEFINE_FWK_MODULE(Ntuplizer_noTagAndProbe_multipleTaus_AOD); - -#endif //NTUPLIZER_NOTAGANDPROBE_MULTIPLETAUS_AOD_H diff --git a/TauTagAndProbe/plugins/SelectionFilter.cc b/TauTagAndProbe/plugins/SelectionFilter.cc new file mode 100644 index 00000000000..6e91cc7db97 --- /dev/null +++ b/TauTagAndProbe/plugins/SelectionFilter.cc @@ -0,0 +1,195 @@ +/*! Apply tau trigger selection vetoes. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ + +#include "CommonTools/UtilAlgos/interface/TFileService.h" + +#include "FWCore/Common/interface/TriggerNames.h" +#include "FWCore/Framework/interface/EDFilter.h" +#include "FWCore/Framework/interface/Event.h" +#include "FWCore/Framework/interface/EventSetup.h" +#include "FWCore/ParameterSet/interface/ParameterSet.h" +#include "FWCore/ServiceRegistry/interface/Service.h" + +#include "DataFormats/Common/interface/TriggerResults.h" +#include "DataFormats/PatCandidates/interface/Electron.h" +#include "DataFormats/PatCandidates/interface/MET.h" +#include "DataFormats/PatCandidates/interface/Muon.h" +#include "DataFormats/PatCandidates/interface/Jet.h" + +#include "TauTriggerTools/Common/interface/AnalysisTypes.h" +#include "TauTriggerTools/Common/interface/CutTools.h" +#include "TauTriggerTools/Common/interface/PatHelpers.h" + +namespace tau_trigger { + +class SelectionFilter : public edm::EDFilter { +public: + using SelectionHist = root_ext::SmartHistogram; + using Cutter = cuts::Cutter<>; + + SelectionFilter(const edm::ParameterSet& cfg) : + enabled(cfg.getParameter("enabled")), + btagThreshold(cfg.getParameter("btagThreshold")), + mtCut(cfg.getParameter("mtCut")), + metFilters(cfg.getParameter>("metFilters")), + electrons_token(consumes(cfg.getParameter("electrons"))), + muons_token(consumes(cfg.getParameter("muons"))), + jets_token(consumes(cfg.getParameter("jets"))), + met_token(consumes(cfg.getParameter("met"))), + metFiltersResults_token(consumes(cfg.getParameter("metFiltersResults"))), + selection("pre_selection") + { + const edm::ParameterSet& customMetFilters = cfg.getParameterSet("customMetFilters"); + for(const auto& filterName : customMetFilters.getParameterNames()) { + customMetFilters_token[filterName] = + mayConsume(customMetFilters.getParameter(filterName)); + } + produces(); + } + +private: + virtual bool filter(edm::Event& event, const edm::EventSetup&) override + { + if(!enabled) { + event.put(std::make_unique()); + return true; + } + bool result = true; + try { + Cutter cut(&selection); + filter(event, cut); + } catch(cuts::cut_failed&) { + result = false; + } + selection.fill_selection(); + return result; + } + + virtual void endJob() override + { + TFile& file = edm::Service()->file(); + selection.SetOutputDirectory(&file); + selection.WriteRootObject(); + } + + void filter(edm::Event& event, Cutter& cut) + { + cut(true, "tag_path_fired"); + edm::Handle muons; + event.getByToken(muons_token, muons); + + // Find signal muon + std::vector signalMuonCandidates; + for(size_t n = 0; n < muons->size(); ++n) { + const pat::Muon& muon = muons->at(n); + if(muon.polarP4().pt() > 24 && std::abs(muon.polarP4().eta()) < 2.1 && muon.isMediumMuon()) + signalMuonCandidates.emplace_back(muons, n); + } + cut(!signalMuonCandidates.empty(), "signal_muon"); + static const auto muonComparitor = [](const pat::MuonRef& a, const pat::MuonRef& b) { + const double iso_a = MuonIsolation(*a), iso_b = MuonIsolation(*b); + if(iso_a != iso_b) return iso_a < iso_b; + return a->polarP4().pt() > b->polarP4().pt(); + }; + std::sort(signalMuonCandidates.begin(), signalMuonCandidates.end(), muonComparitor); + const pat::Muon& signalMuon = *signalMuonCandidates.at(0); + + // Apply third lepton veto + bool has_other_muon = false; + for(const pat::Muon& muon : *muons) { + if(&muon != &signalMuon && muon.isLooseMuon() && muon.polarP4().pt() > 10 + && std::abs(muon.polarP4().eta()) < 2.4 && MuonIsolation(muon) < 0.3) { + has_other_muon = true; + break; + } + } + cut(!has_other_muon, "muon_veto"); + + 
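        // The steps below apply, in order: a loose-electron veto, an optional transverse-mass cut,
        // an optional b-tag veto, and the configured MET filters. A minimal sketch of the mT
        // requirement, assuming Calculate_MT (presumably provided by Common/interface/PatHelpers.h,
        // included above) follows the standard transverse-mass definition:
        //   mT(mu, MET) = sqrt( 2 * pT(mu) * MET * (1 - cos(dphi(mu, MET))) )
        // When mtCut > 0, events with mT >= mtCut fail the "mt_cut" step.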
edm::Handle electrons; + event.getByToken(electrons_token, electrons); + bool has_ele = false; + for(const pat::Electron& ele : *electrons) { + if(ele.polarP4().pt() > 10 && std::abs(ele.polarP4().eta()) < 2.5 + && ele.electronID("mvaEleID-Fall17-iso-V2-wpLoose") > 0.5) { + has_ele = true; + break; + } + } + cut(!has_ele, "ele_veto"); + + // Apply MT cut (if enabled) + if(mtCut > 0) { + edm::Handle metCollection; + event.getByToken(met_token, metCollection); + const pat::MET& met = metCollection->at(0); + const analysis::LorentzVectorM met_p4(met.pt(), 0, met.phi(), 0); + cut(Calculate_MT(signalMuon.polarP4(), met_p4) < mtCut, "mt_cut"); + } + + // Apply b tag veto (if enabled) + if(btagThreshold > 0) { + edm::Handle jets; + event.getByToken(jets_token, jets); + bool has_bjet = false; + for(const pat::Jet& jet : *jets) { + const auto btag = jet.bDiscriminator("pfDeepFlavourJetTags:probb") + + jet.bDiscriminator("pfDeepFlavourJetTags:probbb") + + jet.bDiscriminator("pfDeepFlavourJetTags:problepb"); + if(jet.polarP4().pt() > 20 && std::abs(jet.polarP4().eta()) < 2.4 && btag > btagThreshold) { + has_bjet = true; + break; + } + } + cut(!has_bjet, "btag_veto"); + } + + // Apply MET filters + edm::Handle metFiltersResults; + event.getByToken(metFiltersResults_token, metFiltersResults); + const edm::TriggerNames& metFilterNames = event.triggerNames(*metFiltersResults); + const auto passFilter = [&](const std::string& filter) { + auto iter = customMetFilters_token.find(filter); + if(iter != customMetFilters_token.end()) { + edm::Handle result; + event.getByToken(iter->second, result); + return *result; + } + const size_t index = metFilterNames.triggerIndex(filter); + if(index == metFilterNames.size()) + throw cms::Exception("TauTriggerSelectionFilter") << "MET filter '" << filter << "' not found."; + return metFiltersResults->accept(index); + }; + bool pass_met_filters = true; + for(const std::string& metFilter : metFilters) { + if(!passFilter(metFilter)) { + pass_met_filters = false; + break; + } + } + cut(pass_met_filters, "met_filters"); + + // Put the signal muon into the event + auto signalMuonOutput = std::make_unique(); + signalMuonOutput->push_back(signalMuonCandidates.at(0)); + event.put(std::move(signalMuonOutput)); + } + +private: + const bool enabled; + const double btagThreshold, mtCut; + const std::vector metFilters; + + edm::EDGetTokenT electrons_token; + edm::EDGetTokenT muons_token; + edm::EDGetTokenT jets_token; + edm::EDGetTokenT met_token; + edm::EDGetTokenT metFiltersResults_token; + std::map> customMetFilters_token; + SelectionHist selection; +}; + +} // namespace tau_trigger + +#include "FWCore/Framework/interface/MakerMacros.h" +using TauTriggerSelectionFilter = tau_trigger::SelectionFilter; +DEFINE_FWK_MODULE(TauTriggerSelectionFilter); diff --git a/TauTagAndProbe/plugins/SummaryProducer.cc b/TauTagAndProbe/plugins/SummaryProducer.cc new file mode 100644 index 00000000000..3b79bb8dcbc --- /dev/null +++ b/TauTagAndProbe/plugins/SummaryProducer.cc @@ -0,0 +1,129 @@ +/*! Creates tuple for tau analysis. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ + +#include "Compression.h" + +#include "CommonTools/UtilAlgos/interface/TFileService.h" + +#include "DataFormats/VertexReco/interface/Vertex.h" + +#include "FWCore/Framework/interface/stream/EDProducer.h" +#include "FWCore/Framework/interface/Event.h" +#include "FWCore/Framework/interface/EventSetup.h" +#include "FWCore/ParameterSet/interface/ParameterSet.h" +#include "FWCore/ServiceRegistry/interface/Service.h" + +#include "SimDataFormats/GeneratorProducts/interface/GenEventInfoProduct.h" +#include "SimDataFormats/PileupSummaryInfo/interface/PileupSummaryInfo.h" + +#include "TauTriggerTools/Common/interface/GenTruthTools.h" +#include "TauTriggerTools/Common/interface/TriggerDescriptor.h" +#include "TauTriggerTools/TauTagAndProbe/interface/SummaryTuple.h" + +namespace tau_trigger { + +using namespace analysis; + +class SummaryProducer : public edm::stream::EDProducer> { +public: + SummaryProducer(const edm::ParameterSet& cfg, const SummaryProducerData* globalData) : + isMC(cfg.getParameter("isMC")), + genEvent_token(mayConsume(cfg.getParameter("genEvent"))), + puInfo_token(mayConsume>(cfg.getParameter("puInfo"))), + vertices_token(mayConsume >(cfg.getParameter("vertices"))), + data(*globalData) + { + produces(); + } + + static std::unique_ptr initializeGlobalCache(const edm::ParameterSet& cfg) + { + TFile& file = edm::Service()->file(); + file.SetCompressionAlgorithm(ROOT::kLZ4); + file.SetCompressionLevel(4); + const bool isMC = cfg.getParameter("isMC"); + auto data = std::make_unique(file, isMC); + SummaryTuple& summaryTuple = *data->getSummaryTuple(); + summaryTuple().numberOfProcessedEvents = 0; + summaryTuple().totalGenEventWeight = 0; + TriggerDescriptorCollection hltPaths(cfg.getParameter("hltPaths")); + for(unsigned n = 0; n < hltPaths.size(); ++n) { + summaryTuple().trigger_index.push_back(n); + summaryTuple().trigger_pattern.push_back(hltPaths.at(n).path); + } + return data; + } + + static void globalEndJob(SummaryProducerData* data) + { + SummaryProducerData::LockGuard lock(data->getMutex()); + if(data->getExpressTuple()) + data->getExpressTuple()->Write(); + SummaryTuple& summaryTuple = *data->getSummaryTuple(); + const auto& filters = data->getFilters(); + for(const auto& entry : filters) { + summaryTuple().filter_name.push_back(entry.first); + summaryTuple().filter_hash.push_back(entry.second); + } + summaryTuple().exeTime = data->getElapsedTime(); + summaryTuple.Fill(); + summaryTuple.Write(); + } + +private: + static constexpr float default_value = ::tau_trigger::DefaultFillValue(); + static constexpr int default_int_value = ::tau_trigger::DefaultFillValue(); + + virtual void produce(edm::Event& event, const edm::EventSetup&) override + { + event.put(std::make_unique(true)); + + SummaryProducerData::LockGuard lock(data.getMutex()); + + SummaryTuple& summaryTuple = *data.getSummaryTuple(); + summaryTuple().numberOfProcessedEvents++; + + float genWeight = default_value; + int npu = default_int_value; + if(isMC) { + edm::Handle genEvent; + event.getByToken(genEvent_token, genEvent); + genWeight = static_cast(genEvent->weight()); + summaryTuple().totalGenEventWeight += genWeight; + + edm::Handle> puInfo; + event.getByToken(puInfo_token, puInfo); + npu = gen_truth::GetNumberOfPileUpInteractions(puInfo); + } + + if(data.getExpressTuple()) { + ExpressTuple& expressTuple = *data.getExpressTuple(); + expressTuple().run = event.id().run(); + expressTuple().lumi = event.id().luminosityBlock(); + expressTuple().evt = event.id().event(); + + edm::Handle> vertices; + 
event.getByToken(vertices_token, vertices); + expressTuple().npv = static_cast(vertices->size()); + expressTuple().genEventWeight = genWeight; + expressTuple().npu = npu; + + expressTuple.Fill(); + } + } + +private: + const bool isMC; + + edm::EDGetTokenT genEvent_token; + edm::EDGetTokenT> puInfo_token; + edm::EDGetTokenT> vertices_token; + + const SummaryProducerData& data; +}; + +} // namespace tau_trigger + +#include "FWCore/Framework/interface/MakerMacros.h" +using TauTriggerSummaryTupleProducer = tau_trigger::SummaryProducer; +DEFINE_FWK_MODULE(TauTriggerSummaryTupleProducer); diff --git a/TauTagAndProbe/plugins/TauTagAndProbeFilter.cc b/TauTagAndProbe/plugins/TauTagAndProbeFilter.cc deleted file mode 100644 index 24e26472737..00000000000 --- a/TauTagAndProbe/plugins/TauTagAndProbeFilter.cc +++ /dev/null @@ -1,166 +0,0 @@ -#ifndef TAUTAGANDPROBEFILTER_H -#define TAUTAGANDPROBEFILTER_H - -#include "FWCore/Framework/interface/EDFilter.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include - -using namespace edm; -using namespace std; -// using namespace reco; - - -class TauTagAndProbeFilter : public edm::EDFilter { - - public: - TauTagAndProbeFilter(const edm::ParameterSet &); - ~TauTagAndProbeFilter(); - - private: - bool filter(edm::Event &, edm::EventSetup const&); - - float ComputeMT(math::XYZTLorentzVector visP4, const pat::MET& met); - - EDGetTokenT _tausTag; - EDGetTokenT _muonsTag; - EDGetTokenT _metTag; - bool _useMassCuts; - EDGetTokenT > _electronsTag; - edm::EDGetTokenT > _eleLooseIdMapTag; - bool _electronVeto; - EDGetTokenT _bjetsTag; -}; - -TauTagAndProbeFilter::TauTagAndProbeFilter(const edm::ParameterSet & iConfig) : -_tausTag (consumes (iConfig.getParameter("taus"))), -_muonsTag (consumes (iConfig.getParameter("muons"))), -_metTag (consumes (iConfig.getParameter("met"))), -_electronsTag (consumes > (iConfig.getParameter("electrons"))), -_eleLooseIdMapTag (consumes >(iConfig.getParameter("eleLooseIdMap"))), -_bjetsTag (consumes (iConfig.getParameter("bjets"))) -{ - produces (); // probe - produces (); // tag - _useMassCuts = iConfig.getParameter("useMassCuts"); - _electronVeto = iConfig.getParameter("eleVeto"); -} - -TauTagAndProbeFilter::~TauTagAndProbeFilter() -{} - -bool TauTagAndProbeFilter::filter(edm::Event & iEvent, edm::EventSetup const& iSetup) -{ - - std::unique_ptr resultMuon ( new pat::MuonRefVector ); - std::unique_ptr resultTau ( new pat::TauRefVector ); - - // Veto events with loose electrons - if(_electronVeto){ - Handle > electrons; - iEvent.getByToken(_electronsTag, electrons); - Handle > loose_id_decisions; - iEvent.getByToken(_eleLooseIdMapTag, loose_id_decisions); - - for(unsigned int i = 0; i< electrons->size(); ++i){ - - const auto ele = electrons->ptrAt(i); - int isLooseID = (*loose_id_decisions)[ele]; - if(isLooseID && ele->p4().Pt()>10 && fabs(ele->p4().Eta())<2.5) - return false; - - } - - } - - // --------------------- search for the tag in the event -------------------- - Handle muonHandle; - iEvent.getByToken (_muonsTag, muonHandle); - - const pat::MuonRef mu = (*muonHandle)[0] ; - - //--------------------- get the met for mt computation etc. 
----------------- - Handle metHandle; - iEvent.getByToken (_metTag, metHandle); - const pat::MET& met = (*metHandle)[0]; - - float mt = ComputeMT (mu->p4(), met); - - if (mt >= 30 && _useMassCuts) return false; // reject W+jets - - - // ------------------- get Taus ------------------------------- - Handle tauHandle; - iEvent.getByToken (_tausTag, tauHandle); - if (tauHandle->size() < 1) return false; - - vector> tausIdxPtVec; - for (uint itau = 0; itau < tauHandle->size(); ++itau) - { - const pat::TauRef tau = (*tauHandle)[itau] ; - math::XYZTLorentzVector pSum = mu->p4() + tau->p4(); - if (_useMassCuts && (pSum.mass() <= 40 || pSum.mass() >= 80)) continue; // visible mass in (40, 80) - if (deltaR(*tau, *mu) < 0.5) continue; - - // min iso - // float isoMVA = tau->tauID("byIsolationMVArun2v1DBoldDMwLTraw"); - float isoMVA = tau->tauID("byIsolationMVArun2017v2DBoldDMwLTraw2017"); - tausIdxPtVec.push_back(make_pair(isoMVA, itau)); - } - - - pat::TauRef tau; - - if (tausIdxPtVec.size() == 0) return false; //No tau found - if (tausIdxPtVec.size() > 1) sort (tausIdxPtVec.begin(), tausIdxPtVec.end()); //Sort if multiple taus - int tauIdx = tausIdxPtVec.back().second; // min iso --> max MVA score - tau = (*tauHandle)[tauIdx]; - - - // ----------------- b-jets veto --------------------- - Handle bjetHandle; - iEvent.getByToken (_bjetsTag, bjetHandle); - - for(unsigned int ijet = 0; ijet < bjetHandle->size(); ijet++){ - - const pat::JetRef bjet = (*bjetHandle)[ijet]; - if( deltaR(*mu,*bjet)>0.5 && deltaR(*tau,*bjet)>0.5 ) return false; - - } - - - resultTau->push_back (tau); - resultMuon->push_back (mu); - iEvent.put(std::move(resultMuon)); - iEvent.put(std::move(resultTau)); - - return true; -} - -float TauTagAndProbeFilter::ComputeMT (math::XYZTLorentzVector visP4, const pat::MET& met) -{ - math::XYZTLorentzVector METP4 (met.pt()*TMath::Cos(met.phi()), met.pt()*TMath::Sin(met.phi()), 0, met.pt()); - float scalSum = met.pt() + visP4.pt(); - - math::XYZTLorentzVector vecSum (visP4); - vecSum += METP4; - float vecSumPt = vecSum.pt(); - return sqrt (scalSum*scalSum - vecSumPt*vecSumPt); -} - -#include -DEFINE_FWK_MODULE(TauTagAndProbeFilter); - -#endif diff --git a/TauTagAndProbe/plugins/TupleProducer.cc b/TauTagAndProbe/plugins/TupleProducer.cc new file mode 100644 index 00000000000..d9c1cf88bae --- /dev/null +++ b/TauTagAndProbe/plugins/TupleProducer.cc @@ -0,0 +1,340 @@ +/*! Creates tuple for tau analysis. +This file is part of https://github.com/cms-tau-pog/TauTriggerTools. 
*/ + +#include "Compression.h" + +#include "CommonTools/UtilAlgos/interface/TFileService.h" + +#include "DataFormats/L1Trigger/interface/Tau.h" +#include "DataFormats/PatCandidates/interface/Jet.h" +#include "DataFormats/PatCandidates/interface/MET.h" +#include "DataFormats/PatCandidates/interface/Muon.h" +#include "DataFormats/PatCandidates/interface/PackedCandidate.h" +#include "DataFormats/PatCandidates/interface/Tau.h" + +#include "FWCore/Common/interface/TriggerNames.h" +#include "FWCore/Framework/interface/stream/EDProducer.h" +#include "FWCore/Framework/interface/Event.h" +#include "FWCore/Framework/interface/EventSetup.h" +#include "FWCore/ParameterSet/interface/ParameterSet.h" +#include "FWCore/ServiceRegistry/interface/Service.h" + +#include "HLTrigger/HLTcore/interface/HLTConfigProvider.h" + +#include "SimDataFormats/GeneratorProducts/interface/GenEventInfoProduct.h" +#include "SimDataFormats/PileupSummaryInfo/interface/PileupSummaryInfo.h" + +#include "TauTriggerTools/Common/interface/CutTools.h" +#include "TauTriggerTools/Common/interface/GenTruthTools.h" +#include "TauTriggerTools/Common/interface/PatHelpers.h" +#include "TauTriggerTools/Common/interface/TriggerDescriptor.h" +#include "TauTriggerTools/TauTagAndProbe/interface/SummaryTuple.h" + +namespace tau_trigger { + +struct TupleProducerData { + using Mutex = EventTuple::Mutex; + using LockGuard = std::lock_guard; + using SelectionHist = root_ext::SmartHistogram; + + std::unique_ptr eventTuple; + std::unique_ptr selection; + + TupleProducerData(TFile& file) + { + eventTuple = std::make_unique("events", &file, false); + selection = std::make_unique("producer_selection"); + selection->SetOutputDirectory(&file); + } +}; + +class TupleProducer : public edm::stream::EDProducer> { +public: + using SelectionHist = TupleProducerData::SelectionHist; + using Cutter = cuts::Cutter<>; + using exception = analysis::exception; + + TupleProducer(const edm::ParameterSet& cfg, const TupleProducerData* producerData) : + btagThreshold(cfg.getParameter("btagThreshold")), + isMC(cfg.getParameter("isMC")), + triggerProcess(cfg.getParameter("triggerProcess")), + genEvent_token(consumeIT(cfg, "genEvent", false)), + genParticles_token(consumeIT(cfg, "genParticles", false)), + puInfo_token(consumeIT>(cfg, "puInfo", false)), + vertices_token(consumeIT>(cfg, "vertices")), + signalMuon_token(consumeIT(cfg, "signalMuon")), + taus_token(consumeIT(cfg, "taus")), + jets_token(consumeIT(cfg, "jets")), + met_token(consumeIT(cfg, "met")), + triggerResults_token(consumes(edm::InputTag("TriggerResults", "", triggerProcess))), + triggerObjects_token(consumeIT(cfg, "triggerObjects")), + l1Taus_token(consumeIT(cfg, "l1Taus")), + triggerDescriptors(cfg.getParameter("hltPaths")), + data(*producerData) + { + produces(); + } + + static std::unique_ptr initializeGlobalCache(const edm::ParameterSet&) + { + TFile& file = edm::Service()->file(); + file.SetCompressionAlgorithm(ROOT::kLZ4); + file.SetCompressionLevel(4); + return std::make_unique(file); + } + + static void globalEndJob(TupleProducerData* data) + { + TupleProducerData::LockGuard lock(data->eventTuple->GetMutex()); + data->eventTuple->Write(); + data->selection->WriteRootObject(); + } + +private: + static constexpr float default_value = ::tau_trigger::DefaultFillValue(); + static constexpr int default_int_value = ::tau_trigger::DefaultFillValue(); + static constexpr double deltaR2Thr = 0.5*0.5; + + template + edm::EDGetTokenT consumeIT(const edm::ParameterSet& cfg, const std::string& name, bool always = 
true) + { + if(always) + return consumes(cfg.getParameter(name)); + return mayConsume(cfg.getParameter(name)); + } + + virtual void beginRun(const edm::Run& run, const edm::EventSetup& setup) + { + HLTConfigProvider hltConfigProvider; + bool changedConfig; + if(!hltConfigProvider.init(run, setup, triggerProcess, changedConfig)) + throw exception("Unable to initialize HLTConfigProvider."); + triggerDescriptors.updateGlobalIndices(hltConfigProvider.triggerNames()); + } + + virtual void produce(edm::Event& event, const edm::EventSetup&) override + { + event.put(std::make_unique(true)); + TupleProducerData::LockGuard lock(data.eventTuple->GetMutex()); + try { + Cutter cut(data.selection.get()); + fillTuple(event, cut); + } catch(cuts::cut_failed&) {} + data.selection->fill_selection(); + } + + void fillTuple(edm::Event& event, Cutter& cut) + { + EventTuple& eventTuple = *data.eventTuple; + cut(true, "total"); + eventTuple().run = event.id().run(); + eventTuple().lumi = event.id().luminosityBlock(); + eventTuple().evt = event.id().event(); + + edm::Handle> vertices; + event.getByToken(vertices_token, vertices); + eventTuple().npv = static_cast(vertices->size()); + + edm::Handle> hGenParticles; + + if(isMC) { + edm::Handle genEvent; + event.getByToken(genEvent_token, genEvent); + eventTuple().genEventWeight = static_cast(genEvent->weight()); + + edm::Handle> puInfo; + event.getByToken(puInfo_token, puInfo); + eventTuple().npu = gen_truth::GetNumberOfPileUpInteractions(puInfo); + + event.getByToken(genParticles_token, hGenParticles); + } + + auto genParticles = hGenParticles.isValid() ? hGenParticles.product() : nullptr; + std::vector genLeptons; + if(genParticles) + genLeptons = gen_truth::CollectGenLeptons(*genParticles); + + edm::Handle signalMuonCollection; + event.getByToken(signalMuon_token, signalMuonCollection); + const pat::Muon* muon = signalMuonCollection.isValid() && !signalMuonCollection->empty() + ? &(*signalMuonCollection->at(0)) : nullptr; + gen_truth::LeptonMatchResult gen_muon; + LorentzVectorM muon_ref_p4; + bool has_muon = false; + if(muon) { + gen_muon = gen_truth::LeptonGenMatch(muon->polarP4(), genLeptons); + muon_ref_p4 = muon->polarP4(); + has_muon = true; + } else { + gen_muon = SelectGenLeg(genLeptons, false); + if(gen_muon.match != GenLeptonMatch::NoMatch) { + muon_ref_p4 = gen_muon.visible_p4; + has_muon = true; + } + } + cut(has_muon, "has_muon"); + + edm::Handle triggerResults; + event.getByToken(triggerResults_token, triggerResults); + const edm::TriggerNames& triggerNames = event.triggerNames(*triggerResults); + edm::Handle triggerObjects; + event.getByToken(triggerObjects_token, triggerObjects); + edm::Handle l1Taus; + event.getByToken(l1Taus_token, l1Taus); + + const auto muonTriggerMatch = triggerDescriptors.matchTriggerObjects(*triggerResults, *triggerObjects, + muon_ref_p4, triggerNames.triggerNames(), deltaR2Thr, true, false); + cut(!muonTriggerMatch.matchResults.empty(), "tag_trig_match"); + + edm::Handle metCollection; + event.getByToken(met_token, metCollection); + const pat::MET& met = metCollection->at(0); + const LorentzVectorM met_p4(met.pt(), 0, met.phi(), 0); + eventTuple().met_pt = static_cast(met.pt()); + eventTuple().met_phi = static_cast(met.phi()); + eventTuple().muon_pt = muon ? static_cast(muon->polarP4().pt()) : default_value; + eventTuple().muon_eta = muon ? static_cast(muon->polarP4().eta()) : default_value; + eventTuple().muon_phi = muon ? static_cast(muon->polarP4().phi()) : default_value; + eventTuple().muon_mass = muon ? 
static_cast(muon->polarP4().mass()) : default_value; + eventTuple().muon_charge = muon ? muon->charge() : default_int_value; + eventTuple().muon_iso = muon ? MuonIsolation(*muon) : default_value; + eventTuple().muon_mt = muon ? Calculate_MT(muon->polarP4(), LorentzVectorM(met.pt(), 0, met.phi(), 0)) + : default_value; + const bool has_gen_muon = gen_muon.match != GenLeptonMatch::NoMatch; + eventTuple().muon_gen_match = static_cast(gen_muon.match); + eventTuple().muon_gen_charge = has_gen_muon ? gen_muon.gen_particle_lastCopy->charge() : default_int_value; + eventTuple().muon_gen_vis_pt = has_gen_muon ? static_cast(gen_muon.visible_p4.pt()) : default_value; + eventTuple().muon_gen_vis_eta = has_gen_muon ? static_cast(gen_muon.visible_p4.eta()) : default_value; + eventTuple().muon_gen_vis_phi = has_gen_muon ? static_cast(gen_muon.visible_p4.phi()) : default_value; + eventTuple().muon_gen_vis_mass = has_gen_muon ? static_cast(gen_muon.visible_p4.mass()) : default_value; + + edm::Handle taus; + event.getByToken(taus_token, taus); + + edm::Handle jets; + event.getByToken(jets_token, jets); + + const auto& selected_taus = CollectTaus(muon_ref_p4, *taus, genLeptons, deltaR2Thr); + cut(!selected_taus.empty(), "has_tau"); + bool has_good_tau = false; + for(const auto& tau_entry : selected_taus) { + const pat::Tau* tau = tau_entry.reco_tau; + const auto& gen_tau = tau_entry.gen_tau; + const bool has_gen_tau = gen_tau.match != GenLeptonMatch::NoMatch; + const LorentzVectorM tau_ref_p4 = tau ? tau->polarP4() : LorentzVectorM(gen_tau.visible_p4); + if(!tau && !has_gen_tau) + throw exception("Inconsistent tau entry"); + if(btagThreshold > 0 && !PassBtagVeto(muon_ref_p4, tau_ref_p4, *jets, btagThreshold, deltaR2Thr)) continue; + + eventTuple().tau_sel = tau_entry.selection; + eventTuple().tau_pt = tau ? static_cast(tau->polarP4().pt()) : default_value; + eventTuple().tau_eta = tau ? static_cast(tau->polarP4().eta()) : default_value; + eventTuple().tau_phi = tau ? static_cast(tau->polarP4().phi()) : default_value; + eventTuple().tau_mass = tau ? static_cast(tau->polarP4().mass()) : default_value; + eventTuple().tau_charge = tau ? tau->charge() : default_int_value; + + eventTuple().tau_gen_match = static_cast(gen_tau.match); + eventTuple().tau_gen_charge = has_gen_tau ? gen_tau.gen_particle_firstCopy->charge() : default_int_value; + eventTuple().tau_gen_vis_pt = has_gen_tau ? static_cast(gen_tau.visible_p4.pt()) : default_value; + eventTuple().tau_gen_vis_eta = has_gen_tau ? static_cast(gen_tau.visible_p4.eta()) : default_value; + eventTuple().tau_gen_vis_phi = has_gen_tau ? static_cast(gen_tau.visible_p4.phi()) : default_value; + eventTuple().tau_gen_vis_mass = has_gen_tau ? static_cast(gen_tau.visible_p4.mass()) : default_value; + eventTuple().tau_gen_rad_pt = has_gen_tau ? static_cast(gen_tau.visible_rad_p4.pt()) : default_value; + eventTuple().tau_gen_rad_eta = has_gen_tau ? static_cast(gen_tau.visible_rad_p4.eta()) + : default_value; + eventTuple().tau_gen_rad_phi = has_gen_tau ? static_cast(gen_tau.visible_rad_p4.phi()) + : default_value; + eventTuple().tau_gen_rad_energy = has_gen_tau ? static_cast(gen_tau.visible_rad_p4.energy()) + : default_value; + eventTuple().tau_gen_n_charged_hadrons = has_gen_tau ? static_cast(gen_tau.n_charged_hadrons) + : default_int_value; + eventTuple().tau_gen_n_neutral_hadrons = has_gen_tau ? static_cast(gen_tau.n_neutral_hadrons) + : default_int_value; + eventTuple().tau_gen_n_gammas = has_gen_tau ? 
static_cast(gen_tau.n_gammas) : default_int_value; + eventTuple().tau_gen_n_gammas_rad = has_gen_tau ? static_cast(gen_tau.n_gammas_rad) + : default_int_value; + + eventTuple().tau_decayMode = tau ? tau->decayMode() : default_int_value; + eventTuple().tau_oldDecayModeFinding = tau ? tau->tauID("decayModeFinding") > 0.5f : default_int_value; + + for(const auto& tau_id_entry : tau_id::GetTauIdDescriptors()) { + const auto& desc = tau_id_entry.second; + desc.FillTuple(eventTuple, tau, default_value); + } + + eventTuple().tau_dxy = tau ? tau->dxy() : default_value; + eventTuple().tau_dxy_error = tau ? tau->dxy_error() : default_value; + eventTuple().tau_ip3d = tau ? tau->ip3d() : default_value; + eventTuple().tau_ip3d_error = tau ? tau->ip3d_error() : default_value; + + const pat::PackedCandidate* leadChargedHadrCand = tau + ? dynamic_cast(tau->leadChargedHadrCand().get()) + : nullptr; + eventTuple().tau_dz = leadChargedHadrCand ? leadChargedHadrCand->dz() : default_value; + eventTuple().tau_dz_error = leadChargedHadrCand && leadChargedHadrCand->hasTrackDetails() + ? leadChargedHadrCand->dzError() : default_value; + + eventTuple().vis_mass = static_cast((muon_ref_p4 + tau_ref_p4).mass()); + + const auto tauTriggerMatch = triggerDescriptors.matchTriggerObjects(*triggerResults, *triggerObjects, + tau_ref_p4, triggerNames.triggerNames(), deltaR2Thr, true, true); + eventTuple().hlt_accept = tauTriggerMatch.accept.to_ullong(); + eventTuple().hlt_acceptAndMatch = tauTriggerMatch.acceptAndMatch.to_ullong(); + for(const auto& match_entry : tauTriggerMatch.matchResults) { + const auto& hlt_obj = triggerObjects->at(match_entry.second.hltObjIndex); + eventTuple().hltObj_types.push_back(match_entry.second.objType); + eventTuple().hltObj_pt.push_back(static_cast(hlt_obj.polarP4().pt())); + eventTuple().hltObj_eta.push_back(static_cast(hlt_obj.polarP4().eta())); + eventTuple().hltObj_phi.push_back(static_cast(hlt_obj.polarP4().phi())); + eventTuple().hltObj_mass.push_back(static_cast(hlt_obj.polarP4().mass())); + eventTuple().hltObj_hasPathName.push_back(match_entry.second.hasPathName.to_ullong()); + eventTuple().hltObj_isBestMatch.push_back(match_entry.second.isBestMatch.to_ullong()); + eventTuple().hltObj_miniAODIndex.push_back(match_entry.second.hltObjIndex); + const size_t hltObj_index = eventTuple().hltObj_pt.size() - 1; + for(const std::string& filter : match_entry.second.filters) { + eventTuple().filter_hltObj.push_back(hltObj_index); + const uint32_t hash = SummaryProducerData::GetData().getFilterHash(filter); + eventTuple().filter_hash.push_back(hash); + } + } + + auto l1Tau = MatchL1Taus(tau_ref_p4, *l1Taus, deltaR2Thr, 0); + eventTuple().l1Tau_pt = l1Tau ? static_cast(l1Tau->polarP4().pt()) : default_value; + eventTuple().l1Tau_eta = l1Tau ? static_cast(l1Tau->polarP4().eta()) : default_value; + eventTuple().l1Tau_phi = l1Tau ? static_cast(l1Tau->polarP4().phi()) : default_value; + eventTuple().l1Tau_mass = l1Tau ? static_cast(l1Tau->polarP4().mass()) : default_value; + eventTuple().l1Tau_hwIso = l1Tau ? l1Tau->hwIso() : default_int_value; + eventTuple().l1Tau_hwQual = l1Tau ? 
l1Tau->hwQual() : default_int_value; + + has_good_tau = true; + eventTuple.Fill(); + } + cut(has_good_tau, "btag_veto"); + } + +private: + const double btagThreshold; + const bool isMC; + const std::string triggerProcess; + + edm::EDGetTokenT genEvent_token; + edm::EDGetTokenT> genParticles_token; + edm::EDGetTokenT> puInfo_token; + edm::EDGetTokenT> vertices_token; + edm::EDGetTokenT signalMuon_token; + edm::EDGetTokenT taus_token; + edm::EDGetTokenT jets_token; + edm::EDGetTokenT met_token; + edm::EDGetTokenT triggerResults_token; + edm::EDGetTokenT triggerObjects_token; + edm::EDGetTokenT l1Taus_token; + + TriggerDescriptorCollection triggerDescriptors; + const TupleProducerData& data; +}; + +} // namespace tau_trigger + +#include "FWCore/Framework/interface/MakerMacros.h" +using TauTriggerTupleProducer = tau_trigger::TupleProducer; +DEFINE_FWK_MODULE(TauTriggerTupleProducer); diff --git a/TauTagAndProbe/plugins/ZeroBias.cc b/TauTagAndProbe/plugins/ZeroBias.cc deleted file mode 100644 index 94432c92b84..00000000000 --- a/TauTagAndProbe/plugins/ZeroBias.cc +++ /dev/null @@ -1,1214 +0,0 @@ -#ifndef ZeroBias_H -#define ZeroBias_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - - -#include "FWCore/Framework/interface/EDAnalyzer.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include -#include "FWCore/ServiceRegistry/interface/Service.h" -#include "FWCore/Common/interface/TriggerNames.h" -#include "HLTrigger/HLTcore/interface/HLTConfigProvider.h" -#include "DataFormats/L1Trigger/interface/Tau.h" -#include "DataFormats/L1Trigger/interface/Jet.h" -#include "DataFormats/L1Trigger/interface/EGamma.h" -#include "DataFormats/L1Trigger/interface/Muon.h" -#include "DataFormats/VertexReco/interface/Vertex.h" -#include "DataFormats/JetReco/interface/CaloJet.h" -#include "DataFormats/BTauReco/interface/JetTag.h" -#include "DataFormats/TrackReco/interface/TrackFwd.h" -#include "DataFormats/ParticleFlowCandidate/interface/PFCandidateFwd.h" -#include "DataFormats/JetReco/interface/PFJetCollection.h" -#include "DataFormats/TauReco/interface/PFTau.h" -#include "DataFormats/TauReco/interface/PFTauFwd.h" -#include "RecoTauTag/RecoTau/interface/RecoTauCommonUtilities.h" -#include -#include "DataFormats/TauReco/interface/PFJetChargedHadronAssociation.h" -#include "DataFormats/TauReco/interface/JetPiZeroAssociation.h" - -#include "tParameterSet.h" - -#include "CommonTools/UtilAlgos/interface/TFileService.h" - - - -//Set this variable to decide the number of triggers that you want to check simultaneously -#define NUMBER_OF_MAXIMUM_TRIGGERS 64 - - -/* - ██████ ███████ ██████ ██ █████ ██████ █████ ████████ ██ ██████ ███ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ █████ ██ ██ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██████ ███████ ██████ ███████ ██ ██ ██ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -class ZeroBias : public edm::EDAnalyzer { -public: - /// Constructor - explicit ZeroBias(const edm::ParameterSet&); - /// Destructor - virtual ~ZeroBias(); - -private: - //----edm control--- - virtual void beginJob() ; - virtual void beginRun(edm::Run const&, edm::EventSetup const&); - virtual void analyze(const edm::Event&, const edm::EventSetup&); - virtual void endJob(); - virtual void endRun(edm::Run const&, edm::EventSetup const&); - void Initialize(); - bool hasFilters(const pat::TriggerObjectStandAlone& obj , const 
std::vector& filtersToLookFor); - Long64_t FindTriggerBit(const vector foundPaths, const vector indexOfPaths, const edm::Handle& triggerResults); - - TTree *_tree; - TTree *_triggerNamesTree; - std::string _treeName; - // ------------------------------------- - // variables to be filled in output tree - ULong64_t _indexevents; - Int_t _runNumber; - Int_t _lumi; - unsigned long _EventTriggerBits; - - std::vector _l1tQual; - std::vector _l1tPt; - std::vector _l1tEta; - std::vector _l1tPhi; - std::vector _l1tIso; - - std::vector _l1tEmuQual; - std::vector _l1tEmuPt; - std::vector _l1tEmuEta; - std::vector _l1tEmuPhi; - std::vector _l1tEmuIso; - std::vector _l1tEmuNTT; - std::vector _l1tEmuHasEM; - std::vector _l1tEmuIsMerged; - std::vector _l1tEmuTowerIEta; - std::vector _l1tEmuTowerIPhi; - std::vector _l1tEmuRawEt; - std::vector _l1tEmuIsoEt; - - std::vector _l1tQualJet; - std::vector _l1tPtJet; - std::vector _l1tEtaJet; - std::vector _l1tPhiJet; - std::vector _l1tIsoJet; - std::vector _l1tTowerIEtaJet; - std::vector _l1tTowerIPhiJet; - std::vector _l1tRawEtJet; - - std::vector _l1tEmuQualJet; - std::vector _l1tEmuPtJet; - std::vector _l1tEmuEtaJet; - std::vector _l1tEmuPhiJet; - std::vector _l1tEmuIsoJet; - std::vector _l1tEmuTowerIEtaJet; - std::vector _l1tEmuTowerIPhiJet; - std::vector _l1tEmuRawEtJet; - - std::vector _l1tEGQual; - std::vector _l1tEGPt; - std::vector _l1tEGEta; - std::vector _l1tEGPhi; - std::vector _l1tEGIso; - - std::vector _l1tEmuEGQual; - std::vector _l1tEmuEGPt; - std::vector _l1tEmuEGEta; - std::vector _l1tEmuEGPhi; - std::vector _l1tEmuEGIso; - std::vector _l1tEmuEGNTT; - std::vector _l1tEmuEGTowerIEta; - std::vector _l1tEmuEGTowerIPhi; - std::vector _l1tEmuEGRawEt; - std::vector _l1tEmuEGIsoEt; - - std::vector _l1tMuQual; - std::vector _l1tMuPt; - std::vector _l1tMuEta; - std::vector _l1tMuPhi; - - std::vector _l1tEmuMuQual; - std::vector _l1tEmuMuPt; - std::vector _l1tEmuMuEta; - std::vector _l1tEmuMuPhi; - std::vector _l1tEmuMuIso; - - std::vector _hltTauPt; - std::vector _hltTauEta; - std::vector _hltTauPhi; - std::vector _hltTauTriggerBits; - - std::vector _hltMuPt; - std::vector _hltMuEta; - std::vector _hltMuPhi; - std::vector _hltMuTriggerBits; - - std::vector _hltElePt; - std::vector _hltEleEta; - std::vector _hltElePhi; - std::vector _hltEleTriggerBits; - - int _hltL2CaloJet_N; - std::vector _hltL2CaloJet_Pt; - std::vector _hltL2CaloJet_Eta; - std::vector _hltL2CaloJet_Phi; - std::vector _hltL2CaloJet_Iso; - - int _hltL2CaloJetIsoPix_N; - std::vector _hltL2CaloJetIsoPix_Pt; - std::vector _hltL2CaloJetIsoPix_Eta; - std::vector _hltL2CaloJetIsoPix_Phi; - - int _hltPixelTrack_N; - std::vector _hltPixelTrack_Pt; - std::vector _hltPixelTrack_Eta; - std::vector _hltPixelTrack_Phi; - - int _hltMergedTrackTauReg_N; - std::vector _hltMergedTrackTauReg_Pt; - std::vector _hltMergedTrackTauReg_Eta; - std::vector _hltMergedTrackTauReg_Phi; - - int _hltPFRegCand_N; - std::vector _hltPFRegCand_Pt; - std::vector _hltPFRegCand_Eta; - std::vector _hltPFRegCand_Phi; - - int _hltAK4PFRegJet_N; - std::vector _hltAK4PFRegJet_Pt; - std::vector _hltAK4PFRegJet_Eta; - std::vector _hltAK4PFRegJet_Phi; - - int _hltPFRegCandJetReg_N; - std::vector _hltPFRegCandJetReg_Pt; - std::vector _hltPFRegCandJetReg_Eta; - std::vector _hltPFRegCandJetReg_Phi; - - int _hltTauPFJetsRecoTauChargedHadronsReg_N; - std::vector _hltTauPFJetsRecoTauChargedHadronsReg_Pt; - std::vector _hltTauPFJetsRecoTauChargedHadronsReg_Eta; - std::vector _hltTauPFJetsRecoTauChargedHadronsReg_Phi; - - int 
_hltPFTauPiZerosReg_N; - std::vector _hltPFTauPiZerosReg_Pt; - std::vector _hltPFTauPiZerosReg_Eta; - std::vector _hltPFTauPiZerosReg_Phi; - - int _hltPFTauSansRefReg_N; - std::vector _hltPFTauSansRefReg_Pt; - std::vector _hltPFTauSansRefReg_Eta; - std::vector _hltPFTauSansRefReg_Phi; - - int _hltPFTauTrack_N; - std::vector _hltPFTauTrack_Pt; - std::vector _hltPFTauTrack_Eta; - std::vector _hltPFTauTrack_Phi; - - int _hltPFTauTrackReg_N; - std::vector _hltPFTauTrackReg_Pt; - std::vector _hltPFTauTrackReg_Eta; - std::vector _hltPFTauTrackReg_Phi; - - int _hltPFTau35TrackPt1Reg_N; - std::vector _hltPFTau35TrackPt1Reg_Pt; - std::vector _hltPFTau35TrackPt1Reg_Eta; - std::vector _hltPFTau35TrackPt1Reg_Phi; - - edm::EDGetTokenT _L1TauTag ; - edm::EDGetTokenT _L1EmuTauTag ; - edm::EDGetTokenT _l1tJetTag; - edm::EDGetTokenT _l1tEmuJetTag; - edm::EDGetTokenT _L1EGTag ; - edm::EDGetTokenT _L1EmuEGTag ; - edm::EDGetTokenT _L1MuTag ; - edm::EDGetTokenT _L1EmuMuTag ; - edm::EDGetTokenT _triggerObjects; - edm::EDGetTokenT _triggerBits; - - edm::EDGetTokenT _hltL2CaloJet_ForIsoPix_Tag; - edm::EDGetTokenT _hltL2CaloJet_ForIsoPix_IsoTag; - - edm::EDGetTokenT _hltPixelTracksRegForTau_Tag; - edm::EDGetTokenT _hltMergedTracksTauReg_Tag; - edm::EDGetTokenT _hltPFRegCand_Tag; - edm::EDGetTokenT _hltAK4PFRegJet_Tag; - edm::EDGetTokenT _hltPFTauSansRefReg_Tag; - - edm::EDGetTokenT _jet_token; - edm::EDGetTokenT > _hltPFJetRegion_Tag; - edm::EDGetTokenT _chargedHadron_token; - edm::EDGetTokenT _piZero_token; - - //!Contains the parameters - tVParameterSet _parameters; - - edm::InputTag _processName; - //! Maximum - std::bitset _EventTriggerBitSet; - HLTConfigProvider _hltConfig; - - vector _triggerlist; - vector _indexOfPath; - vector _foundPaths; - -}; - -/* - ██ ███ ███ ██████ ██ ███████ ███ ███ ███████ ███ ██ ████████ █████ ████████ ██ ██████ ███ ██ - ██ ████ ████ ██ ██ ██ ██ ████ ████ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ - ██ ██ ████ ██ ██████ ██ █████ ██ ████ ██ █████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ - ██ ██ ██ ██ ███████ ███████ ██ ██ ███████ ██ ████ ██ ██ ██ ██ ██ ██████ ██ ████ -*/ - -// ----Constructor and Destructor ----- -ZeroBias::ZeroBias(const edm::ParameterSet& iConfig) : - _L1TauTag (consumes (iConfig.getParameter("L1Tau"))), - _L1EmuTauTag (consumes (iConfig.getParameter("L1EmuTau"))), - _l1tJetTag (consumes (iConfig.getParameter("l1tJetCollection"))), - _l1tEmuJetTag (consumes (iConfig.getParameter("l1tEmuJetCollection"))), - _L1EGTag (consumes (iConfig.getParameter("L1EG"))), - _L1EmuEGTag (consumes (iConfig.getParameter("L1EmuEG"))), - _L1MuTag (consumes (iConfig.getParameter("L1Mu"))), - _L1EmuMuTag (consumes (iConfig.getParameter("L1EmuMu"))), - _triggerObjects (consumes (iConfig.getParameter("triggerSet"))), - _triggerBits (consumes (iConfig.getParameter("triggerResultsLabel"))), - _hltL2CaloJet_ForIsoPix_Tag(consumes (iConfig.getParameter("L2CaloJet_ForIsoPix_Collection"))), - _hltL2CaloJet_ForIsoPix_IsoTag(consumes (iConfig.getParameter("L2CaloJet_ForIsoPix_IsoCollection"))), - _hltPixelTracksRegForTau_Tag(consumes(iConfig.getParameter("PixelTrackCollection"))), - _hltMergedTracksTauReg_Tag(consumes(iConfig.getParameter("MergedTrackCollection"))), - _hltPFRegCand_Tag(consumes(iConfig.getParameter("PFRegCandCollection"))), - _hltAK4PFRegJet_Tag(consumes(iConfig.getParameter("AK4PFRegJetCollection"))), - _hltPFTauSansRefReg_Tag(consumes(iConfig.getParameter("PFTauSansRefRegCollection"))), - 
_jet_token(consumes(iConfig.getParameter("AK4PFRegJetCollection"))), - _hltPFJetRegion_Tag(consumes >(iConfig.getParameter("PFJetRegionCollection"))), - _chargedHadron_token(consumes(iConfig.getParameter("PFJetChargedHadronAssociation"))), - _piZero_token(consumes(iConfig.getParameter("JetPiZeroAssociation"))) -{ - this -> _treeName = iConfig.getParameter("treeName"); - this -> _processName = iConfig.getParameter("triggerResultsLabel"); - - TString triggerName; - edm::Service fs; - this -> _triggerNamesTree = fs -> make("triggerNames", "triggerNames"); - this -> _triggerNamesTree -> Branch("triggerNames",&triggerName); - - //Building the trigger arrays - const std::vector& HLTList = iConfig.getParameter > ("triggerList"); - for (const edm::ParameterSet& parameterSet : HLTList) { - tParameterSet pSet; - pSet.hltPath = parameterSet.getParameter("HLT"); - triggerName = pSet.hltPath; - pSet.hltFilters1 = parameterSet.getParameter >("path1"); - pSet.hltFilters2 = parameterSet.getParameter >("path2"); - pSet.leg1 = parameterSet.getParameter("leg1"); - pSet.leg2 = parameterSet.getParameter("leg2"); - - _triggerlist.push_back(pSet.hltPath); - this -> _parameters.push_back(pSet); - this -> _triggerNamesTree -> Fill(); - } - - - this -> Initialize(); - return; -} - -ZeroBias::~ZeroBias() -{} - -void ZeroBias::beginRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - - Bool_t changedConfig = false; - - if(!this -> _hltConfig.init(iRun, iSetup, this -> _processName.process(), changedConfig)){ - edm::LogError("HLTMatchingFilter") << "Initialization of HLTConfigProvider failed!!"; - return; - } - - - const edm::TriggerNames::Strings& triggerNames = this -> _hltConfig.triggerNames(); - std::cout << " ===== LOOKING FOR THE PATH INDEXES =====" << std::endl; - for (tParameterSet& parameter : this -> _parameters){ - const std::string& hltPath = parameter.hltPath; - bool found = false; - for(unsigned int j=0; j < triggerNames.size(); j++) - { - std::cout << triggerNames[j] << std::endl; - if (triggerNames[j].find(hltPath) != std::string::npos) { - found = true; - parameter.hltPathIndex = j; - - std::cout << "### FOUND AT INDEX #" << j << " --> " << triggerNames[j] << std::endl; - } - } - if (!found) parameter.hltPathIndex = -1; - } - - - if(changedConfig || _foundPaths.size()==0){ - //cout<<"The present menu is "< _indexevents = 0; - this -> _runNumber = 0; - this -> _lumi = 0; - - this -> _l1tPt .clear(); - this -> _l1tEta .clear(); - this -> _l1tPhi .clear(); - this -> _l1tQual .clear(); - this -> _l1tIso .clear(); - - this -> _l1tEmuPt .clear(); - this -> _l1tEmuEta .clear(); - this -> _l1tEmuPhi .clear(); - this -> _l1tEmuQual .clear(); - this -> _l1tEmuIso .clear(); - this -> _l1tEmuNTT .clear(); - this -> _l1tEmuHasEM .clear(); - this -> _l1tEmuIsMerged .clear(); - this -> _l1tEmuTowerIEta .clear(); - this -> _l1tEmuTowerIPhi .clear(); - this -> _l1tEmuRawEt .clear(); - this -> _l1tEmuIsoEt .clear(); - - this -> _l1tPtJet .clear(); - this -> _l1tEtaJet .clear(); - this -> _l1tPhiJet .clear(); - this -> _l1tIsoJet .clear(); - this -> _l1tQualJet .clear(); - this -> _l1tTowerIEtaJet .clear(); - this -> _l1tTowerIPhiJet .clear(); - this -> _l1tRawEtJet .clear(); - - this -> _l1tEmuPtJet .clear(); - this -> _l1tEmuEtaJet .clear(); - this -> _l1tEmuPhiJet .clear(); - this -> _l1tEmuIsoJet .clear(); - this -> _l1tEmuQualJet .clear(); - this -> _l1tEmuTowerIEtaJet .clear(); - this -> _l1tEmuTowerIPhiJet .clear(); - this -> _l1tEmuRawEtJet .clear(); - - this -> _l1tEGPt .clear(); - this -> _l1tEGEta 
.clear(); - this -> _l1tEGPhi .clear(); - this -> _l1tEGQual .clear(); - this -> _l1tEGIso .clear(); - - this -> _l1tEmuEGPt .clear(); - this -> _l1tEmuEGEta .clear(); - this -> _l1tEmuEGPhi .clear(); - this -> _l1tEmuEGQual .clear(); - this -> _l1tEmuEGIso .clear(); - this -> _l1tEmuEGNTT .clear(); - this -> _l1tEmuEGTowerIEta .clear(); - this -> _l1tEmuEGTowerIPhi .clear(); - this -> _l1tEmuEGRawEt .clear(); - this -> _l1tEmuEGIsoEt .clear(); - - this -> _l1tMuPt .clear(); - this -> _l1tMuEta .clear(); - this -> _l1tMuPhi .clear(); - this -> _l1tMuQual .clear(); - - this -> _l1tEmuMuPt .clear(); - this -> _l1tEmuMuEta .clear(); - this -> _l1tEmuMuPhi .clear(); - this -> _l1tEmuMuQual .clear(); - - this -> _hltTauPt .clear(); - this -> _hltTauEta .clear(); - this -> _hltTauPhi .clear(); - this -> _hltTauTriggerBits .clear(); - - this -> _hltMuPt .clear(); - this -> _hltMuEta .clear(); - this -> _hltMuPhi .clear(); - this -> _hltMuTriggerBits .clear(); - - this -> _hltElePt .clear(); - this -> _hltEleEta .clear(); - this -> _hltElePhi .clear(); - this -> _hltEleTriggerBits .clear(); - - this -> _hltL2CaloJet_N = 0; - this -> _hltL2CaloJet_Pt.clear(); - this -> _hltL2CaloJet_Eta.clear(); - this -> _hltL2CaloJet_Phi.clear(); - this -> _hltL2CaloJet_Iso.clear(); - - this -> _hltL2CaloJetIsoPix_N = 0; - this -> _hltL2CaloJetIsoPix_Pt.clear(); - this -> _hltL2CaloJetIsoPix_Eta.clear(); - this -> _hltL2CaloJetIsoPix_Phi.clear(); - - this -> _hltPixelTrack_N = 0; - this -> _hltPixelTrack_Pt.clear(); - this -> _hltPixelTrack_Eta.clear(); - this -> _hltPixelTrack_Phi.clear(); - - this -> _hltMergedTrackTauReg_N = 0; - this -> _hltMergedTrackTauReg_Pt.clear(); - this -> _hltMergedTrackTauReg_Eta.clear(); - this -> _hltMergedTrackTauReg_Phi.clear(); - - this -> _hltPFRegCand_N = 0; - this -> _hltPFRegCand_Pt.clear(); - this -> _hltPFRegCand_Eta.clear(); - this -> _hltPFRegCand_Phi.clear(); - - this -> _hltAK4PFRegJet_N = 0; - this -> _hltAK4PFRegJet_Pt.clear(); - this -> _hltAK4PFRegJet_Eta.clear(); - this -> _hltAK4PFRegJet_Phi.clear(); - - this -> _hltPFRegCandJetReg_N = 0; - this -> _hltPFRegCandJetReg_Pt.clear(); - this -> _hltPFRegCandJetReg_Eta.clear(); - this -> _hltPFRegCandJetReg_Phi.clear(); - - this -> _hltTauPFJetsRecoTauChargedHadronsReg_N = 0; - this -> _hltTauPFJetsRecoTauChargedHadronsReg_Pt.clear(); - this -> _hltTauPFJetsRecoTauChargedHadronsReg_Eta.clear(); - this -> _hltTauPFJetsRecoTauChargedHadronsReg_Phi.clear(); - - this -> _hltPFTauPiZerosReg_N = 0; - this -> _hltPFTauPiZerosReg_Pt.clear(); - this -> _hltPFTauPiZerosReg_Eta.clear(); - this -> _hltPFTauPiZerosReg_Phi.clear(); - - - this -> _hltPFTauSansRefReg_N = 0; - this -> _hltPFTauSansRefReg_Pt.clear(); - this -> _hltPFTauSansRefReg_Eta.clear(); - this -> _hltPFTauSansRefReg_Phi.clear(); - - this -> _hltPFTauTrack_N = 0; - this -> _hltPFTauTrack_Pt.clear(); - this -> _hltPFTauTrack_Eta.clear(); - this -> _hltPFTauTrack_Phi.clear(); - - this -> _hltPFTauTrackReg_N = 0; - this -> _hltPFTauTrackReg_Pt.clear(); - this -> _hltPFTauTrackReg_Eta.clear(); - this -> _hltPFTauTrackReg_Phi.clear(); - - this -> _hltPFTau35TrackPt1Reg_N = 0; - this -> _hltPFTau35TrackPt1Reg_Pt.clear(); - this -> _hltPFTau35TrackPt1Reg_Eta.clear(); - this -> _hltPFTau35TrackPt1Reg_Phi.clear(); - -} - - -void ZeroBias::beginJob() -{ - edm::Service fs; - this -> _tree = fs -> make(this -> _treeName.c_str(), this -> _treeName.c_str()); - - //Branches - this -> _tree -> Branch("EventNumber", &_indexevents); - this -> _tree -> Branch("RunNumber", 
&_runNumber); - this -> _tree -> Branch("lumi", &_lumi); - this -> _tree -> Branch("EventTriggerBits", &_EventTriggerBits, "EventTriggerBits/L"); - - this -> _tree -> Branch("l1tPt", &_l1tPt); - this -> _tree -> Branch("l1tEta", &_l1tEta); - this -> _tree -> Branch("l1tPhi", &_l1tPhi); - this -> _tree -> Branch("l1tQual", &_l1tQual); - this -> _tree -> Branch("l1tIso", &_l1tIso); - - this -> _tree -> Branch("l1tEmuPt", &_l1tEmuPt); - this -> _tree -> Branch("l1tEmuEta", &_l1tEmuEta); - this -> _tree -> Branch("l1tEmuPhi", &_l1tEmuPhi); - this -> _tree -> Branch("l1tEmuQual", &_l1tEmuQual); - this -> _tree -> Branch("l1tEmuIso", &_l1tEmuIso); - this -> _tree -> Branch("l1tEmuNTT", &_l1tEmuNTT); - this -> _tree -> Branch("l1tEmuHasEM", &_l1tEmuHasEM); - this -> _tree -> Branch("l1tEmuIsMerged", &_l1tEmuIsMerged); - this -> _tree -> Branch("l1tEmuTowerIEta", &_l1tEmuTowerIEta); - this -> _tree -> Branch("l1tEmuTowerIPhi", &_l1tEmuTowerIPhi); - this -> _tree -> Branch("l1tEmuRawEt", &_l1tEmuRawEt); - this -> _tree -> Branch("l1tEmuIsoEt", &_l1tEmuIsoEt); - - this -> _tree -> Branch("l1tPtJet", &_l1tPtJet); - this -> _tree -> Branch("l1tEtaJet", &_l1tEtaJet); - this -> _tree -> Branch("l1tPhiJet", &_l1tPhiJet); - this -> _tree -> Branch("l1tQualJet", &_l1tQualJet); - this -> _tree -> Branch("l1tIsoJet", &_l1tIsoJet); - this -> _tree -> Branch("l1tTowerIEtaJet", &_l1tTowerIEtaJet); - this -> _tree -> Branch("l1tTowerIPhiJet", &_l1tTowerIPhiJet); - this -> _tree -> Branch("l1tRawEtJet", &_l1tRawEtJet); - - this -> _tree -> Branch("l1tEmuPtJet", &_l1tEmuPtJet); - this -> _tree -> Branch("l1tEmuEtaJet", &_l1tEmuEtaJet); - this -> _tree -> Branch("l1tEmuPhiJet", &_l1tEmuPhiJet); - this -> _tree -> Branch("l1tEmuQualJet", &_l1tEmuQualJet); - this -> _tree -> Branch("l1tEmuIsoJet", &_l1tEmuIsoJet); - this -> _tree -> Branch("l1tEmuTowerIEtaJet", &_l1tEmuTowerIEtaJet); - this -> _tree -> Branch("l1tEmuTowerIPhiJet", &_l1tEmuTowerIPhiJet); - this -> _tree -> Branch("l1tEmuRawEtJet", &_l1tEmuRawEtJet); - - this -> _tree -> Branch("l1tEGPt", &_l1tEGPt); - this -> _tree -> Branch("l1tEGEta", &_l1tEGEta); - this -> _tree -> Branch("l1tEGPhi", &_l1tEGPhi); - this -> _tree -> Branch("l1tEGQual", &_l1tEGQual); - this -> _tree -> Branch("l1tEGIso", &_l1tEGIso); - - this -> _tree -> Branch("l1tEmuEGPt", &_l1tEmuEGPt); - this -> _tree -> Branch("l1tEmuEGEta", &_l1tEmuEGEta); - this -> _tree -> Branch("l1tEmuEGPhi", &_l1tEmuEGPhi); - this -> _tree -> Branch("l1tEmuEGQual", &_l1tEmuEGQual); - this -> _tree -> Branch("l1tEmuEGIso", &_l1tEmuEGIso); - this -> _tree -> Branch("l1tEmuEGNTT", &_l1tEmuEGNTT); - this -> _tree -> Branch("l1tEmuEGTowerIEta", &_l1tEmuEGTowerIEta); - this -> _tree -> Branch("l1tEmuEGTowerIPhi", &_l1tEmuEGTowerIPhi); - this -> _tree -> Branch("l1tEmuEGRawEt", &_l1tEmuEGRawEt); - this -> _tree -> Branch("l1tEmuEGIsoEt", &_l1tEmuEGIsoEt); - - this -> _tree -> Branch("l1tMuPt", &_l1tMuPt); - this -> _tree -> Branch("l1tMuEta", &_l1tMuEta); - this -> _tree -> Branch("l1tMuPhi", &_l1tMuPhi); - this -> _tree -> Branch("l1tMuQual", &_l1tMuQual); - - this -> _tree -> Branch("l1tEmuMuPt", &_l1tEmuMuPt); - this -> _tree -> Branch("l1tEmuMuEta", &_l1tEmuMuEta); - this -> _tree -> Branch("l1tEmuMuPhi", &_l1tEmuMuPhi); - this -> _tree -> Branch("l1tEmuMuQual", &_l1tEmuMuQual); - - this -> _tree -> Branch("hltTauPt", &_hltTauPt); - this -> _tree -> Branch("hltTauEta", &_hltTauEta); - this -> _tree -> Branch("hltTauPhi", &_hltTauPhi); - this -> _tree -> Branch("hltTauTriggerBits", &_hltTauTriggerBits); - - 
this -> _tree -> Branch("hltMuPt", &_hltMuPt); - this -> _tree -> Branch("hltMuEta", &_hltMuEta); - this -> _tree -> Branch("hltMuPhi", &_hltMuPhi); - this -> _tree -> Branch("hltMuTriggerBits", &_hltMuTriggerBits); - - this -> _tree -> Branch("hltElePt", &_hltElePt); - this -> _tree -> Branch("hltEleEta", &_hltEleEta); - this -> _tree -> Branch("hltElePhi", &_hltElePhi); - this -> _tree -> Branch("hltEleTriggerBits", &_hltEleTriggerBits); - - this -> _tree -> Branch("hltL2CaloJet_N", &_hltL2CaloJet_N, "hltL2CaloJet_N/I"); - this -> _tree -> Branch("hltL2CaloJet_Pt", &_hltL2CaloJet_Pt); - this -> _tree -> Branch("hltL2CaloJet_Eta", &_hltL2CaloJet_Eta); - this -> _tree -> Branch("hltL2CaloJet_Phi", &_hltL2CaloJet_Phi); - this -> _tree -> Branch("hltL2CaloJet_Iso", &_hltL2CaloJet_Iso); - - this -> _tree -> Branch("hltL2CaloJetIsoPix_N", &_hltL2CaloJetIsoPix_N, "hltL2CaloJetIsoPix_N/I"); - this -> _tree -> Branch("hltL2CaloJetIsoPix_Pt", &_hltL2CaloJetIsoPix_Pt); - this -> _tree -> Branch("hltL2CaloJetIsoPix_Eta", &_hltL2CaloJetIsoPix_Eta); - this -> _tree -> Branch("hltL2CaloJetIsoPix_Phi", &_hltL2CaloJetIsoPix_Phi); - - this -> _tree -> Branch("hltPixelTrack_N", &_hltPixelTrack_N, "hltPixelTrack_N/I"); - this -> _tree -> Branch("hltPixelTrack_Pt", &_hltPixelTrack_Pt); - this -> _tree -> Branch("hltPixelTrack_Eta", &_hltPixelTrack_Eta); - this -> _tree -> Branch("hltPixelTrack_Phi", &_hltPixelTrack_Phi); - - this -> _tree -> Branch("hltMergedTrackTauReg_N", &_hltMergedTrackTauReg_N, "hltMergedTrackTauReg_N/I"); - this -> _tree -> Branch("hltMergedTrackTauReg_Pt", &_hltMergedTrackTauReg_Pt); - this -> _tree -> Branch("hltMergedTrackTauReg_Eta", &_hltMergedTrackTauReg_Eta); - this -> _tree -> Branch("hltMergedTrackTauReg_Phi", &_hltMergedTrackTauReg_Phi); - - this -> _tree -> Branch("hltPFRegCand_N", &_hltPFRegCand_N, "hltPFRegCand_N/I"); - this -> _tree -> Branch("hltPFRegCand_Pt", &_hltPFRegCand_Pt); - this -> _tree -> Branch("hltPFRegCand_Eta", &_hltPFRegCand_Eta); - this -> _tree -> Branch("hltPFRegCand_Phi", &_hltPFRegCand_Phi); - - this -> _tree -> Branch("hltAK4PFRegJet_N", &_hltAK4PFRegJet_N, "hltAK4PFRegJet_N/I"); - this -> _tree -> Branch("hltAK4PFRegJet_Pt", &_hltAK4PFRegJet_Pt); - this -> _tree -> Branch("hltAK4PFRegJet_Eta", &_hltAK4PFRegJet_Eta); - this -> _tree -> Branch("hltAK4PFRegJet_Phi", &_hltAK4PFRegJet_Phi); - - this -> _tree -> Branch("hltPFRegCandJetReg_N", &_hltPFRegCandJetReg_N, "hltPFRegCandJetReg_N/I"); - this -> _tree -> Branch("hltPFRegCandJetReg_Pt", &_hltPFRegCandJetReg_Pt); - this -> _tree -> Branch("hltPFRegCandJetReg_Eta", &_hltPFRegCandJetReg_Eta); - this -> _tree -> Branch("hltPFRegCandJetReg_Phi", &_hltPFRegCandJetReg_Phi); - - this -> _tree -> Branch("hltTauPFJetsRecoTauChargedHadronsReg_N", &_hltTauPFJetsRecoTauChargedHadronsReg_N, "hltTauPFJetsRecoTauChargedHadronsReg_N/I"); - this -> _tree -> Branch("hltTauPFJetsRecoTauChargedHadronsReg_Pt", &_hltTauPFJetsRecoTauChargedHadronsReg_Pt); - this -> _tree -> Branch("hltTauPFJetsRecoTauChargedHadronsReg_Eta", &_hltTauPFJetsRecoTauChargedHadronsReg_Eta); - this -> _tree -> Branch("hltTauPFJetsRecoTauChargedHadronsReg_Phi", &_hltTauPFJetsRecoTauChargedHadronsReg_Phi); - - this -> _tree -> Branch("hltPFPFTauPiZerosReg_N", &_hltPFTauPiZerosReg_N, "hltPFTauPiZerosReg_N/I"); - this -> _tree -> Branch("hltPFPFTauPiZerosReg_Pt", &_hltPFTauPiZerosReg_Pt); - this -> _tree -> Branch("hltPFPFTauPiZerosReg_Eta", &_hltPFTauPiZerosReg_Eta); - this -> _tree -> Branch("hltPFPFTauPiZerosReg_Phi", &_hltPFTauPiZerosReg_Phi); - 
- this -> _tree -> Branch("hltPFTauSansRefReg_N", &_hltPFTauSansRefReg_N, "hltPFTauSansRefReg_N/I"); - this -> _tree -> Branch("hltPFTauSansRefReg_Pt", &_hltPFTauSansRefReg_Pt); - this -> _tree -> Branch("hltPFTauSansRefReg_Eta", &_hltPFTauSansRefReg_Eta); - this -> _tree -> Branch("hltPFTauSansRefReg_Phi", &_hltPFTauSansRefReg_Phi); - - this -> _tree -> Branch("hltPFTauTrack_N", &_hltPFTauTrack_N, "hltPFTauTrack_N/I"); - this -> _tree -> Branch("hltPFTauTrack_Pt", &_hltPFTauTrack_Pt); - this -> _tree -> Branch("hltPFTauTrack_Eta", &_hltPFTauTrack_Eta); - this -> _tree -> Branch("hltPFTauTrack_Phi", &_hltPFTauTrack_Phi); - - this -> _tree -> Branch("hltPFTauTrackReg_N", &_hltPFTauTrackReg_N, "hltPFTauTrackReg_N/I"); - this -> _tree -> Branch("hltPFTauTrackReg_Pt", &_hltPFTauTrackReg_Pt); - this -> _tree -> Branch("hltPFTauTrackReg_Eta", &_hltPFTauTrackReg_Eta); - this -> _tree -> Branch("hltPFTauTrackReg_Phi", &_hltPFTauTrackReg_Phi); - - this -> _tree -> Branch("hltPFTau35TrackPt1Reg_N", &_hltPFTau35TrackPt1Reg_N, "hltPFTau35TrackPt1Reg_N/I"); - this -> _tree -> Branch("hltPFTau35TrackPt1Reg_Pt", &_hltPFTau35TrackPt1Reg_Pt); - this -> _tree -> Branch("hltPFTau35TrackPt1Reg_Eta", &_hltPFTau35TrackPt1Reg_Eta); - this -> _tree -> Branch("hltPFTau35TrackPt1Reg_Phi", &_hltPFTau35TrackPt1Reg_Phi); - - return; -} - - -void ZeroBias::endJob() -{ - return; -} - - -void ZeroBias::endRun(edm::Run const& iRun, edm::EventSetup const& iSetup) -{ - return; -} - - -void ZeroBias::analyze(const edm::Event& iEvent, const edm::EventSetup& eSetup) -{ - this -> Initialize(); - - _indexevents = iEvent.id().event(); - _runNumber = iEvent.id().run(); - _lumi = iEvent.luminosityBlock(); - - edm::Handle< BXVector > L1TauHandle; - try {iEvent.getByToken(_L1TauTag, L1TauHandle);} catch (...) 
{;} - - if(L1TauHandle.isValid()){ - for (l1t::TauBxCollection::const_iterator bx0TauIt = L1TauHandle->begin(0); bx0TauIt != L1TauHandle->end(0) ; bx0TauIt++) - { - const l1t::Tau& l1tTau = *bx0TauIt; - - //cout<<"FW Tau, pT = "< _EventTriggerBitSet[x] = true; - - if(hasTriggerTauType) - { - //std::cout << "#### FOUND TAU WITH HLT PATH " << x << " ####" << std::endl; - this -> _hltTauPt.push_back(obj.pt()); - this -> _hltTauEta.push_back(obj.eta()); - this -> _hltTauPhi.push_back(obj.phi()); - this -> _hltTauTriggerBits.push_back( x ); - } - - if(hasTriggerMuType) - { - //std::cout << "#### FOUND MUON WITH HLT PATH " << x << " ####" << std::endl; - this -> _hltMuPt.push_back(obj.pt()); - this -> _hltMuEta.push_back(obj.eta()); - this -> _hltMuPhi.push_back(obj.phi()); - this -> _hltMuTriggerBits.push_back( x ); - } - - if(hasTriggerEleType) - { - //std::cout << "#### FOUND ELE WITH HLT PATH " << x << " ####" << std::endl; - this -> _hltElePt.push_back(obj.pt()); - this -> _hltEleEta.push_back(obj.eta()); - this -> _hltElePhi.push_back(obj.phi()); - this -> _hltEleTriggerBits.push_back( x ); - } - - } - - x++; - } - - const std::vector& L2CaloJetIsoPix_filters = {"hltL2TauIsoFilterL1TauSeeded"}; - if (this -> hasFilters(obj, L2CaloJetIsoPix_filters)){ - this -> _hltL2CaloJetIsoPix_N++; - this -> _hltL2CaloJetIsoPix_Pt.push_back(obj.pt()); - this -> _hltL2CaloJetIsoPix_Eta.push_back(obj.eta()); - this -> _hltL2CaloJetIsoPix_Phi.push_back(obj.phi()); - } - - const std::vector& PFTauTrack_filters = {"hltPFTauTrack"}; - if (this -> hasFilters(obj, PFTauTrack_filters)){ - this -> _hltPFTauTrack_N++; - this -> _hltPFTauTrack_Pt.push_back(obj.pt()); - this -> _hltPFTauTrack_Eta.push_back(obj.eta()); - this -> _hltPFTauTrack_Phi.push_back(obj.phi()); - } - - const std::vector& PFTauTrackReg_filters = {"hltPFTauTrackReg"}; - if (this -> hasFilters(obj, PFTauTrackReg_filters)){ - this -> _hltPFTauTrackReg_N++; - this -> _hltPFTauTrackReg_Pt.push_back(obj.pt()); - this -> _hltPFTauTrackReg_Eta.push_back(obj.eta()); - this -> _hltPFTauTrackReg_Phi.push_back(obj.phi()); - } - - const std::vector& PFTau35TrackPt1Reg_filters = {"hltDoublePFTau35TrackPt1Reg"}; - if (this -> hasFilters(obj, PFTau35TrackPt1Reg_filters)){ - this -> _hltPFTau35TrackPt1Reg_N++; - this -> _hltPFTau35TrackPt1Reg_Pt.push_back(obj.pt()); - this -> _hltPFTau35TrackPt1Reg_Eta.push_back(obj.eta()); - this -> _hltPFTau35TrackPt1Reg_Phi.push_back(obj.phi()); - } - - } - - } - - - - edm::Handle< reco::CaloJetCollection > L2CaloJets_ForIsoPix_Handle; - try {iEvent.getByToken(_hltL2CaloJet_ForIsoPix_Tag, L2CaloJets_ForIsoPix_Handle);} catch (...) {;} - - edm::Handle< reco::JetTagCollection > L2CaloJets_ForIsoPix_IsoHandle; - try {iEvent.getByToken(_hltL2CaloJet_ForIsoPix_IsoTag, L2CaloJets_ForIsoPix_IsoHandle);} catch (...) {;} - - - if(L2CaloJets_ForIsoPix_Handle.isValid() && L2CaloJets_ForIsoPix_IsoHandle.isValid()){ - - for (auto const & jet : *L2CaloJets_ForIsoPix_IsoHandle){ - edm::Ref jetRef = edm::Ref(L2CaloJets_ForIsoPix_Handle,jet.first.key()); - _hltL2CaloJet_N++; - _hltL2CaloJet_Pt.push_back(jet.first->pt()); - _hltL2CaloJet_Eta.push_back(jet.first->eta()); - _hltL2CaloJet_Phi.push_back(jet.first->phi()); - _hltL2CaloJet_Iso.push_back(jet.second); - - } - - } - - - edm::Handle< reco::TrackCollection > PixelTracks_Handle; - try {iEvent.getByToken(_hltPixelTracksRegForTau_Tag, PixelTracks_Handle);} catch (...) 
{;} - - if(PixelTracks_Handle.isValid()){ - - const reco::TrackCollection tracks = *(PixelTracks_Handle.product()); - for(unsigned int i=0; i MergedTracksTauReg_Handle; - try {iEvent.getByToken(_hltMergedTracksTauReg_Tag, MergedTracksTauReg_Handle);} catch (...) {;} - - if(MergedTracksTauReg_Handle.isValid()){ - const reco::TrackCollection tracks = *(MergedTracksTauReg_Handle.product()); - for(unsigned int i=0; i PFRegCand_Handle; - try {iEvent.getByToken(_hltPFRegCand_Tag, PFRegCand_Handle);} catch (...) {;} - - if(PFRegCand_Handle.isValid()){ - - const reco::PFCandidateCollection PFRegCands = *(PFRegCand_Handle.product()); - for(size_t i=0; i PFJet_Handle; - try {iEvent.getByToken(_hltAK4PFRegJet_Tag, PFJet_Handle);} catch (...) {;} - - if(PFJet_Handle.isValid()){ - - const reco::PFJetCollection PFJets = *(PFJet_Handle.product()); - for(size_t i=0; i jetView; - edm::Handle > PFJetRegion_Handle; - edm::Handle chargedHadronAssoc; - edm::Handle piZeroAssoc; - - /*try {iEvent.getByToken(_hltAK4PFRegJet_Tag, jetView);} catch (...) {;} - try {iEvent.getByToken(_hltPFJetRegion_Tag, PFJetRegion_Handle);} catch (...) {;} - try {iEvent.getByToken(_chargedHadron_token, chargedHadronAssoc);} catch (...) {;} - try {iEvent.getByToken(_piZero_token, piZeroAssoc);} catch (...) {;}*/ - - iEvent.getByToken(_jet_token, jetView); - //iEvent.getByToken(_hltPFJetRegion_Tag, PFJetRegion_Handle); - iEvent.getByToken(_chargedHadron_token, chargedHadronAssoc); - iEvent.getByToken(_piZero_token, piZeroAssoc); - - - if(jetView.isValid() && chargedHadronAssoc.isValid() && piZeroAssoc.isValid()){ - - reco::PFJetRefVector jets = reco::tau::castView(jetView); - for(size_t ij = 0; ij < jetView->size(); ij++){ - const auto& jetRef = jetView->refAt(ij); - - //BOOST_FOREACH( reco::PFJetRef jetRef, jets ) { - - - //reco::PFJetRef jetRegionRef = (*PFJetRegion_Handle)[jetRef]; - const std::vector& chargedHadrons = (*chargedHadronAssoc)[jetRef]; - const std::vector& piZeros = (*piZeroAssoc)[jetRef]; - - /*this -> _hltPFRegCandJetReg_N++; - this -> _hltPFRegCandJetReg_Pt.push_back(jetRegionRef->pt()); - this -> _hltPFRegCandJetReg_Eta.push_back(jetRegionRef->eta()); - this -> _hltPFRegCandJetReg_Phi.push_back(jetRegionRef->phi());*/ - - for(unsigned int i_h=0;i_h PFTauSansRefReg_Handle; - try {iEvent.getByToken(_hltPFTauSansRefReg_Tag, PFTauSansRefReg_Handle);} catch (...) 
{;} - - if(PFTauSansRefReg_Handle.isValid()){ - - const reco::PFTauCollection PFTaus = *(PFTauSansRefReg_Handle.product()); - for(size_t i=0; i _tree -> Fill(); - -} - - - -bool ZeroBias::hasFilters(const pat::TriggerObjectStandAlone& obj , const std::vector& filtersToLookFor) { - - const std::vector& eventLabels = obj.filterLabels(); - for (const std::string& filter : filtersToLookFor) - { - //Looking for matching filters - bool found = false; - for (const std::string& label : eventLabels) - { - - if (label == filter) - { - - //std::cout << "#### FOUND FILTER " << label << " == " << filter << " ####" << std::endl; - found = true; - } - } - if(!found) return false; - } - - return true; -} - - - -Long64_t ZeroBias::FindTriggerBit(const vector foundPaths, const vector indexOfPaths, const edm::Handle& triggerResults){ - - Long64_t bit =0; - - for(int it=0;it<(int)_triggerlist.size();it++){ - for(int j=0;j<(int)foundPaths.size();j++){ - - string toCheckTrigger = _triggerlist.at(it) ; - string elemAllTriggers = foundPaths.at(j) ; - - if (elemAllTriggers.find(toCheckTrigger) != std::string::npos) // equivalent to wildcard at the end or beginning of triggername - { - if(triggerResults->accept(indexOfPaths[j]))bit |= long(1) < -DEFINE_FWK_MODULE(ZeroBias); - -#endif //ZeroBias_H diff --git a/TauTagAndProbe/plugins/genMatchTauFilter.cc b/TauTagAndProbe/plugins/genMatchTauFilter.cc deleted file mode 100644 index e6017bb7ec9..00000000000 --- a/TauTagAndProbe/plugins/genMatchTauFilter.cc +++ /dev/null @@ -1,66 +0,0 @@ -#ifndef GENMATCHTAUFILTER_H -#define GENMATCHTAUFILTER_H - -#include "FWCore/Framework/interface/EDFilter.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include - -#include - -using namespace edm; -using namespace std; -// using namespace reco; - - -class genMatchTauFilter : public edm::EDFilter { - - public: - genMatchTauFilter(const edm::ParameterSet &); - ~genMatchTauFilter(); - - private: - bool filter(edm::Event &, edm::EventSetup const&); - EDGetTokenT _tauTag; -}; - -genMatchTauFilter::genMatchTauFilter(const edm::ParameterSet & iConfig) : -_tauTag (consumes (iConfig.getParameter("taus"))) -{ - produces (); -} - -genMatchTauFilter::~genMatchTauFilter() -{} - -bool genMatchTauFilter::filter(edm::Event & iEvent, edm::EventSetup const& iSetup) -{ - std::unique_ptr resultTau ( new pat::TauRefVector ); - Handle tauHandle; - iEvent.getByToken (_tauTag, tauHandle); - - int goodTaus = 0; - for (uint itau = 0; itau < tauHandle->size(); ++itau) - { - const pat::TauRef tau = (*tauHandle)[itau] ; - if (tau->genJet() && deltaR(tau->p4(), tau->genJet()->p4()) < 0.5 && tau->genJet()->pt() > 8.) 
- { - ++goodTaus; - resultTau->push_back (tau); - } - } - if (goodTaus == 0) return false; - - iEvent.put(std::move(resultTau)); - - return true; -} - -#include -DEFINE_FWK_MODULE(genMatchTauFilter); - -#endif diff --git a/TauTagAndProbe/plugins/genMatchTauFilter_AOD.cc b/TauTagAndProbe/plugins/genMatchTauFilter_AOD.cc deleted file mode 100644 index d6dc8edd70c..00000000000 --- a/TauTagAndProbe/plugins/genMatchTauFilter_AOD.cc +++ /dev/null @@ -1,101 +0,0 @@ -#ifndef GENMATCHTAUFILTER_AOD_H -#define GENMATCHTAUFILTER_AOD_H - -#include "FWCore/Framework/interface/EDFilter.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include - -#include "DataFormats/TauReco/interface/PFTau.h" -#include "DataFormats/TauReco/interface/PFTauFwd.h" -#include "DataFormats/TauReco/interface/PFTauDiscriminator.h" - -#include "DataFormats/JetReco/interface/PFJetCollection.h" -#include "DataFormats/Common/interface/Association.h" -#include "DataFormats/JetReco/interface/GenJetCollection.h" - -#include - -#include - -using namespace edm; -using namespace std; - -class genMatchTauFilter_AOD : public edm::EDFilter { - - public: - genMatchTauFilter_AOD(const edm::ParameterSet &); - ~genMatchTauFilter_AOD(); - -private: - bool filter(edm::Event &, edm::EventSetup const&); - EDGetTokenT _tauTag; - EDGetTokenT _tauGenJetTag; -}; - -genMatchTauFilter_AOD::genMatchTauFilter_AOD(const edm::ParameterSet & iConfig) : - _tauTag (consumes (iConfig.getParameter("taus"))), - _tauGenJetTag (consumes (iConfig.getParameter("genJets"))) -{ - produces (); -} - -genMatchTauFilter_AOD::~genMatchTauFilter_AOD() -{} - -bool genMatchTauFilter_AOD::filter(edm::Event & iEvent, edm::EventSetup const& iSetup) -{ - std::unique_ptr resultTau ( new reco::PFTauCollection ); - Handle tauHandle; - iEvent.getByToken (_tauTag, tauHandle); - - Handle tauGenJetHandle; - iEvent.getByToken (_tauGenJetTag,tauGenJetHandle); - - int goodTaus = 0; - int nTaus = 0; - - for(reco::PFTauCollection::const_iterator it=tauHandle->begin(); it!=tauHandle->end(); ++it) - { - TLorentzVector jet; - jet.SetPtEtaPhiM(it->jetRef()->pt(),it->jetRef()->eta(),it->jetRef()->phi(),it->jetRef()->mass()); - - bool matchedToGen = false; - - for(reco::GenJetCollection::const_iterator it2=tauGenJetHandle->begin(); it2!=tauGenJetHandle->end(); ++it2) - { - TLorentzVector genJet; - genJet.SetPtEtaPhiM(it2->pt(),it2->eta(),it2->phi(),it2->mass()); - if(genJet.DeltaR(jet)<0.5) - { - matchedToGen = true; - break; - } - } - - TLorentzVector tau; - tau.SetPtEtaPhiM(it->pt(),it->eta(),it->phi(),it->mass()); - - if (it->jetRef()->pt() > 8. 
&& tau.DeltaR(jet)<0.5 && matchedToGen) - { - ++goodTaus; - resultTau->push_back ((*tauHandle)[nTaus]); - } - - nTaus++; - - } - if (goodTaus == 0) return false; - - iEvent.put(std::move(resultTau)); - - return true; -} - -#include -DEFINE_FWK_MODULE(genMatchTauFilter_AOD); - -#endif diff --git a/TauTagAndProbe/plugins/muonNumberFilter.cc b/TauTagAndProbe/plugins/muonNumberFilter.cc deleted file mode 100644 index 7fa5e195a04..00000000000 --- a/TauTagAndProbe/plugins/muonNumberFilter.cc +++ /dev/null @@ -1,73 +0,0 @@ -#ifndef MUONNUMBERFILTER_H -#define MUONNUMBERFILTER_H - -#include "FWCore/Framework/interface/EDFilter.h" -#include "FWCore/ParameterSet/interface/ParameterSet.h" -#include -#include -#include -#include -#include -#include - -#include - -using namespace edm; -using namespace std; -// using namespace reco; - - -class muonNumberFilter : public edm::EDFilter { - - public: - muonNumberFilter(const edm::ParameterSet &); - ~muonNumberFilter(); - - private: - bool filter(edm::Event &, edm::EventSetup const&); - EDGetTokenT _muonTag; -}; - -muonNumberFilter::muonNumberFilter(const edm::ParameterSet & iConfig) : -_muonTag (consumes (iConfig.getParameter("src"))) -{} - -muonNumberFilter::~muonNumberFilter() -{} - -bool muonNumberFilter::filter(edm::Event & iEvent, edm::EventSetup const& iSetup) -{ - Handle muonHandle; - iEvent.getByToken (_muonTag, muonHandle); - - // very strict - veto all events with > 1 muon - // if (muonHandle->size() != 1) return false; - - - int nmu = 0; - for (unsigned int imu = 0; imu < muonHandle->size(); imu++) - { - const pat::Muon& mu = muonHandle->at(imu); - float pt = mu.pt(); - float iso = (mu.pfIsolationR04().sumChargedHadronPt + max(mu.pfIsolationR04().sumNeutralHadronEt + mu.pfIsolationR04().sumPhotonEt - 0.5 * mu.pfIsolationR04().sumPUPt, 0.0)) / pt; - //cout<<"muon pt = "< -DEFINE_FWK_MODULE(muonNumberFilter); - -#endif diff --git a/TauTagAndProbe/plugins/tParameterSet.h b/TauTagAndProbe/plugins/tParameterSet.h deleted file mode 100644 index 33176dc9545..00000000000 --- a/TauTagAndProbe/plugins/tParameterSet.h +++ /dev/null @@ -1,20 +0,0 @@ -#ifndef TPARAMETERSET_H -#define TPARAMETERSET_H - - -struct tParameterSet { - //!Contains the HLT paths - std::string hltPath; - //!Contains the index in trigger names where hltPath is kept - int hltPathIndex; - //!Contains the filters - std::vector hltFilters1; - std::vector hltFilters2; - //!Contains the leg - int leg1; - int leg2; -}; - -typedef std::vector tVParameterSet; - -#endif diff --git a/TauTagAndProbe/python/.gitignore b/TauTagAndProbe/python/.gitignore deleted file mode 100644 index 1a38d622732..00000000000 --- a/TauTagAndProbe/python/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -__init__.py -*.pyc diff --git a/TauTagAndProbe/python/MCanalysis_2016_cff.py b/TauTagAndProbe/python/MCanalysis_2016_cff.py deleted file mode 100644 index 225126ba63d..00000000000 --- a/TauTagAndProbe/python/MCanalysis_2016_cff.py +++ /dev/null @@ -1,222 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on MC" - -HLTLIST_TAG = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07"), - path2 = cms.vstring (""), - leg1 = cms.int32(13), - leg2 = cms.int32(13) - ), -) - -HLTLIST = cms.VPSet( - #MuTau - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTauJet20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19LooseIsoPFTau20"), - 
path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterIsoMu19LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu18erIorSingleMu20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu19LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu19LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_LooseCombinedIsoPFTau20_v1"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTauJet20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19LooseCombinedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseCombinedIsoAgainstMuon", "hltOverlapFilterIsoMu19LooseCombinedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19MediumIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19MediumIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_MediumCombinedIsoPFTau32_Trk1_eta2p1_Reg_v1"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19MediumCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19MediumCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_TightCombinedIsoPFTau32_Trk1_eta2p1_Reg_v1"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19TightCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1TightCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19TightCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - # the following ones are extra! 
- cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu20erIorSingleMu22erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu21LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu21LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21MediumIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21MediumIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_MediumCombinedIsoPFTau32_Trk1_eta2p1_Reg_v1"), - path1 = cms.vstring ("hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21MediumCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21MediumCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_TightCombinedIsoPFTau32_Trk1_eta2p1_Reg_v1"), - path1 = cms.vstring ("hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21TightCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1TightCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21TightCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - ) - - - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - HLTPaths = ['HLT_IsoMu27_v*'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -### ---------------------------------------------------------------------- -### gen info, only from MC -### ---------------------------------------------------------------------- -genInfo = cms.EDProducer("GenFiller", - src = cms.InputTag("prunedGenParticles"), - storeLightFlavAndGlu = cms.bool(True) # if True, store also udcs and gluons (first copy) - ) - -## only events where slimmedMuons has exactly 1 muon -muonNumberFilter = cms.EDFilter ("muonNumberFilter", - src = cms.InputTag("slimmedMuons") -) - - -## good muons for T&P -goodMuons = cms.EDFilter("PATMuonRefSelector", - src = cms.InputTag("slimmedMuons"), - cut = cms.string( - 'pt > 24 && abs(eta) < 2.1 ' # kinematics - '&& ( (pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - 0.5 * pfIsolationR04().sumPUPt, 0.0)) / pt() ) < 0.1 ' # isolation - '&& isMediumMuon()' # quality -- medium muon - ), - filter = cms.bool(True) -) - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("NewTauIDsEmbedded"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.1 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - #'&& tauID("byVVLooseIsolationMVArun2017v1DBoldDMwLT2017") > 0.5 ' # tau iso - NOTE: can as well use boolean discriminators with WP - '&& tauID("againstMuonTight3") > 
0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - - -genMatchedTaus = cms.EDFilter("genMatchTauFilter", - taus = cms.InputTag("goodTaus") - ) - - -## b jet veto : no additional b jets in the event (reject tt) -- use in sequence with -bjets = cms.EDFilter("PATJetRefSelector", - src = cms.InputTag("slimmedJets"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.4 ' #kinematics - '&& bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags") > 0.8484' # b tag with medium WP - ), - #filter = cms.bool(True) -) - - -TagAndProbe = cms.EDFilter("TauTagAndProbeFilter", - taus = cms.InputTag("goodTaus"), - muons = cms.InputTag("goodMuons"), - met = cms.InputTag("slimmedMETs"), - useMassCuts = cms.bool(False), - electrons = cms.InputTag("slimmedElectrons"), - eleLooseIdMap = cms.InputTag("egmGsfElectronIDs:mvaEleID-Fall17-iso-V1-wpLoose"), - eleVeto = cms.bool(True), - bjets = cms.InputTag("bjets") - -) - - - -patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", - patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), - triggerResults = cms.InputTag('TriggerResults', '', "HLT"), - unpackFilterLabels = cms.bool(True) - ) - - - -# Ntuplizer.taus = cms.InputTag("genMatchedTaus") -Ntuplizer = cms.EDAnalyzer("Ntuplizer", - treeName = cms.string("TagAndProbe"), - isMC = cms.bool(True), - genCollection = cms.InputTag("generator"), - genPartCollection = cms.InputTag("genInfo"), - muons = cms.InputTag("goodMuons"), - taus = cms.InputTag("goodTaus"), - puInfo = cms.InputTag("slimmedAddPileupInfo"), - met = cms.InputTag("slimmedMETs"), - triggerSet = cms.InputTag("patTriggerUnpacker"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - triggerList = HLTLIST, - triggerList_tag = HLTLIST_TAG, - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - - -TAndPseq = cms.Sequence( - hltFilter + - muonNumberFilter + - goodMuons + - goodTaus + - bjets + - TagAndProbe + - genInfo #+ - #genMatchedTaus -) - - -genMatchSeq = cms.Sequence( - genMatchedTaus -) - -NtupleSeq = cms.Sequence( - patTriggerUnpacker + - Ntuplizer -) - diff --git a/TauTagAndProbe/python/MCanalysis_2017_cff.py b/TauTagAndProbe/python/MCanalysis_2017_cff.py deleted file mode 100644 index c5121bc47f4..00000000000 --- a/TauTagAndProbe/python/MCanalysis_2017_cff.py +++ /dev/null @@ -1,346 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on MC" - - -HLTLIST_TAG = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07"), - path2 = cms.vstring (""), - leg1 = cms.int32(13), - leg2 = cms.int32(13) - ), -) - - -HLTLIST = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = 
cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_TightID_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoTightOOSCPhotonsPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24LooseChargedIsoTightOOSCPhotonsPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackMediumChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_TightID_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoTightOOSCPhotonsPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackMediumChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24MediumChargedIsoTightOOSCPhotonsPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackTightChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24TightChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_TightID_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoTightOOSCPhotonsPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackTightChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24TightChargedIsoTightOOSCPhotonsPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1LooseChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1LooseChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24LooseChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", 
"hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22erIsoTau40erL1f0L2f10QL3f20QL3trkIsoFiltered0p07"), - path2 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatchedMu22IsoTau40"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #MuTau CrossL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring 
("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu+Tau40 - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau40_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau40_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau30_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - 
path1 = cms.vstring ("hltL3crIsoL1sOrIsoMu24Tau30L1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau30MonitoringReg"), - path2 = cms.vstring ("hltIsoMu24SinglePFTau30TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau30MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau50_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sOrIsoMu24Tau50L1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau50MonitoringReg"), - path2 = cms.vstring ("hltIsoMu24SinglePFTau50TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau50MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu+Tau20 - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_LooseChargedIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_LooseChargedIsoPFTau20Reg_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20Reg"), - path2 = cms.vstring ("hltSinglePFTau20TrackPt1LooseChargedIsolationReg", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), -) - - - - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - HLTPaths = ['HLT_IsoMu27_v*'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -### ---------------------------------------------------------------------- -### gen info, only from MC -### ---------------------------------------------------------------------- -genInfo = cms.EDProducer("GenFiller", - src = cms.InputTag("prunedGenParticles"), - storeLightFlavAndGlu = cms.bool(True) # if True, store also udcs and gluons (first copy) - ) - -## only events where slimmedMuons has exactly 1 muon -muonNumberFilter = cms.EDFilter ("muonNumberFilter", - src = cms.InputTag("slimmedMuons") -) - - -## good muons for T&P -goodMuons = cms.EDFilter("PATMuonRefSelector", - src = cms.InputTag("slimmedMuons"), - cut = cms.string( - 'pt > 24 && abs(eta) < 2.1 ' # kinematics - '&& ( (pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - 0.5 * pfIsolationR04().sumPUPt, 0.0)) / pt() ) < 0.1 ' # isolation - '&& isMediumMuon()' # quality -- medium muon - ), - filter = cms.bool(True) -) - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("NewTauIDsEmbedded"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.1 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - #'&& tauID("byVVLooseIsolationMVArun2017v1DBoldDMwLT2017") > 0.5 ' # tau iso - NOTE: 
can as well use boolean discriminators with WP - '&& tauID("againstMuonTight3") > 0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - - -genMatchedTaus = cms.EDFilter("genMatchTauFilter", - taus = cms.InputTag("goodTaus") - ) - - -## b jet veto : no additional b jets in the event (reject tt) -- use in sequence with -bjets = cms.EDFilter("PATJetRefSelector", - src = cms.InputTag("slimmedJets"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.4 ' #kinematics - '&& bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags") > 0.8484' # b tag with medium WP - ), - #filter = cms.bool(True) -) - - -TagAndProbe = cms.EDFilter("TauTagAndProbeFilter", - taus = cms.InputTag("goodTaus"), - muons = cms.InputTag("goodMuons"), - met = cms.InputTag("slimmedMETs"), - useMassCuts = cms.bool(False), - electrons = cms.InputTag("slimmedElectrons"), - eleLooseIdMap = cms.InputTag("egmGsfElectronIDs:mvaEleID-Fall17-iso-V1-wpLoose"), - eleVeto = cms.bool(True), - bjets = cms.InputTag("bjets") - -) - - - -patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", - patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), - triggerResults = cms.InputTag('TriggerResults', '', "HLT"), - unpackFilterLabels = cms.bool(True) - ) - - - -# Ntuplizer.taus = cms.InputTag("genMatchedTaus") -Ntuplizer = cms.EDAnalyzer("Ntuplizer", - treeName = cms.string("TagAndProbe"), - isMC = cms.bool(True), - genCollection = cms.InputTag("generator"), - genPartCollection = cms.InputTag("genInfo"), - muons = cms.InputTag("goodMuons"), - taus = cms.InputTag("goodTaus"), - puInfo = cms.InputTag("slimmedAddPileupInfo"), - met = cms.InputTag("slimmedMETs"), - triggerSet = cms.InputTag("patTriggerUnpacker"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - triggerList = HLTLIST, - triggerList_tag = HLTLIST_TAG, - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - - -TAndPseq = cms.Sequence( - hltFilter + - muonNumberFilter + - goodMuons + - goodTaus + - bjets + - TagAndProbe + - genInfo #+ - #genMatchedTaus -) - - -genMatchSeq = cms.Sequence( - genMatchedTaus -) - -NtupleSeq = cms.Sequence( - patTriggerUnpacker + - Ntuplizer -) diff --git a/TauTagAndProbe/python/MCanalysis_cff.py b/TauTagAndProbe/python/MCanalysis_cff.py deleted file mode 100644 index ba84079d04e..00000000000 --- a/TauTagAndProbe/python/MCanalysis_cff.py +++ /dev/null @@ -1,278 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on MC" - - -HLTLIST_TAG = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07"), - path2 = cms.vstring (""), - leg1 = cms.int32(13), - leg2 = cms.int32(13) - ), -) - - -HLTLIST = cms.VPSet( - #Mu-Tau20 (VBF monitoring) - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_LooseChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), 
- cms.PSet ( - HLT = cms.string("HLT_IsoMu27_MediumChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackMediumChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_TightChargedIsoPFTau20_Trk1_eta2p1_SingleL_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackTightChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu-Tau35 (di-tau monitoring) - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu-Tau50 (Tau+MET monitoring) - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22erIsoTau40erL1f0L2f10QL3f24QL3trkIsoFiltered0p07"), - path2 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatchedMu22IsoTau40"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #Mu-Tau27 (signal path) - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring 
("hltSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #Mu+Tau HPS - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltHpsSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), 
- leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #SingleTau - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_v"), - path1 = cms.vstring ("hltSelectedPFTau180MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), -) - - - - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - HLTPaths = ['HLT_IsoMu27_v*'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -### ---------------------------------------------------------------------- -### gen info, only from MC -### ---------------------------------------------------------------------- -genInfo = cms.EDProducer("GenFiller", - src = cms.InputTag("prunedGenParticles"), - storeLightFlavAndGlu = cms.bool(True) # if True, store also udcs and gluons (first copy) - ) - -## only events where slimmedMuons has exactly 1 muon -muonNumberFilter = cms.EDFilter ("muonNumberFilter", - src = cms.InputTag("slimmedMuons") -) - - -## good muons for T&P -goodMuons = cms.EDFilter("PATMuonRefSelector", - src = cms.InputTag("slimmedMuons"), - cut = cms.string( - 'pt > 24 && abs(eta) < 2.1 ' # kinematics - '&& ( (pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - 0.5 * pfIsolationR04().sumPUPt, 0.0)) / pt() ) < 0.1 ' # isolation - '&& isMediumMuon()' # quality -- medium muon - ), - filter = cms.bool(True) -) - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("NewTauIDsEmbedded"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.1 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - #'&& tauID("decayModeFinding") > 0.5 || tauID("decayModeFindingNewDMs") > 0.5 ' # tau ID - #'&& tauID("byMediumIsolationMVArun2v1DBoldDMwLT") > 0.5 ' # tau iso - NOTE: can as well use boolean discriminators with WP - '&& tauID("againstMuonTight3") > 0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - - -genMatchedTaus = cms.EDFilter("genMatchTauFilter", - taus = cms.InputTag("goodTaus") - ) - -## b jet veto : no additional b jets in the event (reject tt) -- use in sequence with -bjets = cms.EDFilter("PATJetRefSelector", - src = cms.InputTag("slimmedJets"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.4 ' #kinematics - '&& (bDiscriminator("pfDeepFlavourJetTags:probb") + bDiscriminator("pfDeepFlavourJetTags:probbb") + bDiscriminator("pfDeepFlavourJetTags:problepb")) > 0.2770 ' # b tag with medium WP - ), - #filter = cms.bool(True) -) - - -TagAndProbe = cms.EDFilter("TauTagAndProbeFilter", - taus = cms.InputTag("goodTaus"), - muons = cms.InputTag("goodMuons"), - met = cms.InputTag("slimmedMETs"), - useMassCuts = cms.bool(False), - electrons = cms.InputTag("slimmedElectrons"), - eleLooseIdMap = cms.InputTag("egmGsfElectronIDs:mvaEleID-Fall17-iso-V2-wpLoose"), - eleVeto = cms.bool(True), - bjets = cms.InputTag("bjets") -) - - - -patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", - patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), - triggerResults = 
cms.InputTag('TriggerResults', '', "HLT"), - unpackFilterLabels = cms.bool(True) - ) - - - -# Ntuplizer.taus = cms.InputTag("genMatchedTaus") -Ntuplizer = cms.EDAnalyzer("Ntuplizer", - treeName = cms.string("TagAndProbe"), - isMC = cms.bool(True), - genCollection = cms.InputTag("generator"), - genPartCollection = cms.InputTag("genInfo"), - muons = cms.InputTag("goodMuons"), - taus = cms.InputTag("goodTaus"), - puInfo = cms.InputTag("slimmedAddPileupInfo"), - met = cms.InputTag("slimmedMETs"), - triggerSet = cms.InputTag("patTriggerUnpacker"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - triggerList = HLTLIST, - triggerList_tag = HLTLIST_TAG, - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - - -TAndPseq = cms.Sequence( - hltFilter + - muonNumberFilter + - goodMuons + - goodTaus + - bjets + - TagAndProbe + - genInfo #+ -# genMatchedTaus -) - -genMatchSeq = cms.Sequence( - genMatchedTaus -) - -NtupleSeq = cms.Sequence( - patTriggerUnpacker + - Ntuplizer -) diff --git a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_AOD_cff.py b/TauTagAndProbe/python/MCanalysis_noTagAndProbe_AOD_cff.py deleted file mode 100644 index ad66d025761..00000000000 --- a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_AOD_cff.py +++ /dev/null @@ -1,104 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on MC" - - -HLTLIST = cms.VPSet( - #cms.PSet ( - # HLT = cms.string("HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v"), - # path1 = cms.vstring ("hltL3crIsoL1sMu16erTauJet20erL1f0L2f10QL3f17QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu17LooseIsoPFTau20"), - # path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterIsoMu17LooseIsoPFTau20"), - # leg1 = cms.int32(13), - # leg2 = cms.int32(15) - #) - cms.PSet ( - HLT = cms.string("HLT_IsoMu17_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu16erL1f0L2f10QL3f17QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ) -) - - - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","RECO"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT2"), - HLTPaths = ['HLT_IsoMu18_v5'], - #HLTPaths = ['HLT_Mu7p5_L2Mu2_Jpsi_v4'], - #HLTPaths = ['HLT_IsoMu18_v3'], - #HLTPaths = ['HLT_IsoMu18_v3'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PFTauSelector", - src = cms.InputTag("hpsPFTauProducer"), - cut = cms.string( - 'pt > 18 && abs(eta) < 2.5 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' - ), - discriminators = cms.VPSet( - cms.PSet( 
discriminator=cms.InputTag("hpsPFTauDiscriminationByDecayModeFinding"),selectionCut=cms.double(0.5)), - cms.PSet( discriminator=cms.InputTag("hpsPFTauDiscriminationByRawCombinedIsolationDBSumPtCorr3Hits"),selectionCut=cms.double(2.5)), - cms.PSet( discriminator=cms.InputTag("hpsPFTauDiscriminationByLooseMuonRejection3"),selectionCut=cms.double(0.5)), - cms.PSet( discriminator=cms.InputTag("hpsPFTauDiscriminationByMVA6VLooseElectronRejection"),selectionCut=cms.double(0.5)), - ), - filter = cms.bool(True) -) - -from PhysicsTools.JetMCAlgos.TauGenJets_cfi import tauGenJets - -trueHadronicTaus = cms.EDFilter( - "TauGenJetDecayModeSelector", - src = cms.InputTag("tauGenJets"), - select = cms.vstring( - 'oneProng0Pi0', 'oneProng1Pi0', 'oneProng2Pi0', 'oneProngOther', - 'threeProng0Pi0', 'threeProng1Pi0', 'threeProngOther', 'rare'), - filter = cms.bool(False) -) - -genMatchedTaus = cms.EDFilter("genMatchTauFilter_AOD", - taus = cms.InputTag("goodTaus"), - genJets = cms.InputTag("trueHadronicTaus") - ) - -# Ntuplizer.taus = cms.InputTag("genMatchedTaus") -Ntuplizer_noTagAndProbe = cms.EDAnalyzer("Ntuplizer_noTagAndProbe_AOD", - treeName = cms.string("TagAndProbe"), - taus = cms.InputTag("genMatchedTaus"), - triggerSet = cms.InputTag("selectedPatTrigger"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "RECO"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT2"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #L1Tau = cms.InputTag("caloStage2Digis", "Tau", "HLT"), - #L1Tau = cms.InputTag("caloStage2Digis", "Tau", "HLT2"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - jetCollection = cms.InputTag("ak4PFJetsCHS"), - l1tJetCollection = cms.InputTag("caloStage2Digis","Jet"), - Vertexes = cms.InputTag("offlinePrimaryVertices"), - triggerList = HLTLIST -) - -TAndPseq = cms.Sequence( - #hltFilter + - #goodMuons + - tauGenJets + - trueHadronicTaus + - goodTaus + - genMatchedTaus -) - -NtupleSeq = cms.Sequence( - Ntuplizer_noTagAndProbe -) diff --git a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_cff.py b/TauTagAndProbe/python/MCanalysis_noTagAndProbe_cff.py deleted file mode 100644 index 4c1b3fb2153..00000000000 --- a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_cff.py +++ /dev/null @@ -1,69 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on MC" - - -HLTLIST = cms.VPSet( - cms.PSet ( - HLT = cms.string("HLT_IsoMu17_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu16erL1f0L2f10QL3f17QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ) -) - - - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - HLTPaths = ['HLT_IsoMu18_v5'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("slimmedTaus"), - cut = cms.string( - 'pt > 20 && 
abs(eta) < 2.1 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - '&& tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 2.5 ' # tau iso - NOTE: can as well use boolean discriminators with WP - '&& tauID("againstMuonTight3") > 0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - -genMatchedTaus = cms.EDFilter("genMatchTauFilter", - taus = cms.InputTag("goodTaus") - ) - -# Ntuplizer.taus = cms.InputTag("genMatchedTaus") -Ntuplizer_noTagAndProbe = cms.EDAnalyzer("Ntuplizer_noTagAndProbe", - treeName = cms.string("TagAndProbe"), - genCollection = cms.InputTag("generator"), - taus = cms.InputTag("genMatchedTaus"), - triggerSet = cms.InputTag("slimmedPatTrigger"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - jetCollection = cms.InputTag("slimmedJets"), - l1tJetCollection = cms.InputTag("caloStage2Digis","Jet"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - triggerList = HLTLIST, - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - -TAndPseq = cms.Sequence( - goodTaus + - genMatchedTaus -) - -NtupleSeq = cms.Sequence( - Ntuplizer_noTagAndProbe -) diff --git a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_multipleTaus_AOD_cff.py b/TauTagAndProbe/python/MCanalysis_noTagAndProbe_multipleTaus_AOD_cff.py deleted file mode 100644 index 61ef5688d53..00000000000 --- a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_multipleTaus_AOD_cff.py +++ /dev/null @@ -1,105 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on MC" - - -HLTLIST = cms.VPSet( - #cms.PSet ( - # HLT = cms.string("HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v"), - # path1 = cms.vstring ("hltL3crIsoL1sMu16erTauJet20erL1f0L2f10QL3f17QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu17LooseIsoPFTau20"), - # path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterIsoMu17LooseIsoPFTau20"), - # leg1 = cms.int32(13), - # leg2 = cms.int32(15) - #) - cms.PSet ( - HLT = cms.string("HLT_IsoMu17_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu16erL1f0L2f10QL3f17QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ) -) - - - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","RECO"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT2"), - HLTPaths = ['HLT_IsoMu18_v5'], - #HLTPaths = ['HLT_Mu7p5_L2Mu2_Jpsi_v4'], - #HLTPaths = ['HLT_IsoMu18_v3'], - #HLTPaths = ['HLT_IsoMu18_v3'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -## good taus - apply analysis selection -goodTaus = 
cms.EDFilter("PFTauSelector", - src = cms.InputTag("hpsPFTauProducer"), - cut = cms.string( - 'pt > 18 && abs(eta) < 2.5 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs - ), - discriminators = cms.VPSet( - cms.PSet( discriminator=cms.InputTag("hpsPFTauDiscriminationByDecayModeFinding"),selectionCut=cms.double(0.5)), - cms.PSet( discriminator=cms.InputTag("hpsPFTauDiscriminationByRawCombinedIsolationDBSumPtCorr3Hits"),selectionCut=cms.double(2.5)), - cms.PSet( discriminator=cms.InputTag("hpsPFTauDiscriminationByLooseMuonRejection3"),selectionCut=cms.double(0.5)), - cms.PSet( discriminator=cms.InputTag("hpsPFTauDiscriminationByMVA6VLooseElectronRejection"),selectionCut=cms.double(0.5)), - ), - filter = cms.bool(True) -) - -from PhysicsTools.JetMCAlgos.TauGenJets_cfi import tauGenJets - -trueHadronicTaus = cms.EDFilter( - "TauGenJetDecayModeSelector", - src = cms.InputTag("tauGenJets"), - select = cms.vstring( - 'oneProng0Pi0', 'oneProng1Pi0', 'oneProng2Pi0', 'oneProngOther', - 'threeProng0Pi0', 'threeProng1Pi0', 'threeProngOther', 'rare'), - filter = cms.bool(False) -) - -genMatchedTaus = cms.EDFilter("genMatchTauFilter_AOD", - taus = cms.InputTag("goodTaus"), - genJets = cms.InputTag("trueHadronicTaus") - ) - -# Ntuplizer.taus = cms.InputTag("genMatchedTaus") -Ntuplizer_noTagAndProbe_multipleTaus_AOD = cms.EDAnalyzer("Ntuplizer_noTagAndProbe_multipleTaus_AOD", - treeName = cms.string("TagAndProbe"), - taus = cms.InputTag("goodTaus"), - #taus = cms.InputTag("genMatchedTaus"), - triggerSet = cms.InputTag("selectedPatTrigger"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "RECO"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT2"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #L1Tau = cms.InputTag("caloStage2Digis", "Tau", "HLT"), - #L1Tau = cms.InputTag("caloStage2Digis", "Tau", "HLT2"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - jetCollection = cms.InputTag("ak4PFJetsCHS"), - l1tJetCollection = cms.InputTag("caloStage2Digis","Jet"), - Vertexes = cms.InputTag("offlinePrimaryVertices"), - triggerList = HLTLIST -) - -TAndPseq = cms.Sequence( - #hltFilter + - #goodMuons + - tauGenJets + - trueHadronicTaus + - goodTaus + - genMatchedTaus -) - -NtupleSeq = cms.Sequence( - Ntuplizer_noTagAndProbe_multipleTaus_AOD -) diff --git a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_multipleTaus_cff.py b/TauTagAndProbe/python/MCanalysis_noTagAndProbe_multipleTaus_cff.py deleted file mode 100644 index de3a72157b2..00000000000 --- a/TauTagAndProbe/python/MCanalysis_noTagAndProbe_multipleTaus_cff.py +++ /dev/null @@ -1,94 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on MC" - - -HLTLIST = cms.VPSet( - #cms.PSet ( - # HLT = cms.string("HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v"), - # path1 = cms.vstring ("hltL3crIsoL1sMu16erTauJet20erL1f0L2f10QL3f17QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu17LooseIsoPFTau20"), - # path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterIsoMu17LooseIsoPFTau20"), - # leg1 = cms.int32(13), - # leg2 = cms.int32(15) - #) - cms.PSet ( - HLT = cms.string("HLT_IsoMu17_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu16erL1f0L2f10QL3f17QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - path2 = 
cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu17LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ) -) - - - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","RECO"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - #TriggerResultsTag = cms.InputTag("TriggerResults","","HLT2"), - HLTPaths = ['HLT_IsoMu18_v5'], - #HLTPaths = ['HLT_Mu7p5_L2Mu2_Jpsi_v4'], - #HLTPaths = ['HLT_IsoMu18_v3'], - #HLTPaths = ['HLT_IsoMu18_v3'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("slimmedTaus"), - cut = cms.string( - 'pt > 18 && abs(eta) < 2.5 ' #kinematics - #'pt > 20 && abs(eta) < 2.5 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - '&& tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 2.5 ' # tau iso - NOTE: can as well use boolean discriminators with WP - #'&& tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 2.5 ' # tau iso - NOTE: can as well use boolean discriminators with WP - #'&& tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 1.0 ' # tau iso - NOTE: can as well use boolean discriminators with WP - '&& tauID("againstMuonTight3") > 0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - -genMatchedTaus = cms.EDFilter("genMatchTauFilter", - taus = cms.InputTag("goodTaus") - ) - -# Ntuplizer.taus = cms.InputTag("genMatchedTaus") -Ntuplizer_noTagAndProbe_multipleTaus = cms.EDAnalyzer("Ntuplizer_noTagAndProbe_multipleTaus", - treeName = cms.string("TagAndProbe"), - genCollection = cms.InputTag("generator"), - taus = cms.InputTag("genMatchedTaus"), - triggerSet = cms.InputTag("selectedPatTrigger"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "RECO"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT2"), - #triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - #L1Tau = cms.InputTag("caloStage2Digis", "Tau", "HLT"), - #L1Tau = cms.InputTag("caloStage2Digis", "Tau", "HLT2"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - jetCollection = cms.InputTag("slimmedJets"), - l1tJetCollection = cms.InputTag("caloStage2Digis","Jet"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - triggerList = HLTLIST, - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - -TAndPseq = cms.Sequence( - #hltFilter + - #goodMuons + - goodTaus + - genMatchedTaus -) - -NtupleSeq = cms.Sequence( - Ntuplizer_noTagAndProbe_multipleTaus -) diff --git a/TauTagAndProbe/python/TriggerSF_plotter.py b/TauTagAndProbe/python/TriggerSF_plotter.py new file mode 100644 index 00000000000..a501207d7f6 
--- /dev/null +++ b/TauTagAndProbe/python/TriggerSF_plotter.py @@ -0,0 +1,254 @@ +import argparse +from array import array +import math +import numpy as np +import os +import re +import sys +import ROOT +from ROOT import TAttFill + +ROOT.gROOT.SetBatch(True) +ROOT.TH1.SetDefaultSumw2() +ROOT.gStyle.SetOptStat(0) + +# python TriggerSF_plotter.py --era 2016 --decay-mode 0 --channels ditau --working-points Medium --inputFilePath-new $PWD --outputFilePath $PWD/Tau_Trigger_sf_plots + +parser = argparse.ArgumentParser(description='Plotter for Trigger SFs') +#parser.add_argument('--inputFilePath-old', required=True, type=str, help="input file Path for the Konstantin's old files") +parser.add_argument('--inputFilePath-new', required=True, type=str, help="input file Path for the files") +parser.add_argument('--outputFilePath', required=True, type=str, help="Name of the output file path") +parser.add_argument('--era', required=False, type=str, default='2016,2017,2018', help="Era") +parser.add_argument('--decay-mode', required=False, type=str, default='all,0,1,10,11', help="decay mode indices") +parser.add_argument('--channels', required=False, type=str, default='etau,mutau,ditau', help="channels to process") +parser.add_argument('--working-points', required=False, type=str, + default='VVVLoose,VVLoose,VLoose,Loose,Medium,Tight,VTight,VVTight', + help="working points to process") +args = parser.parse_args() + +#InputFilePath_old = args.inputFilePath_old +InputFilePath_new = args.inputFilePath_new +OutputFilePath = args.outputFilePath +Eras = args.era.split(',') +Decay_modes = args.decay_mode.split(',') +Channels = args.channels.split(',') +Working_points = args.working_points.split(',') + + +def makePlot_NewSFs(histo_dict, outputFileName): + canvasSizeX = 800 + canvasSizeY = 900 + + canvas = ROOT.TCanvas("canvas", "", canvasSizeX, canvasSizeY) + canvas.SetFillColor(10) + canvas.SetFillStyle(4000) + canvas.SetFillColor(10) + canvas.SetTicky() + canvas.SetBorderSize(2) + canvas.SetLeftMargin(0.12) + canvas.SetBottomMargin(0.12) + canvas.cd() + + histogram_true = histo_dict['sf_true'] + histogram_fake = histo_dict['sf_fake'] + + histogram_true.SetMaximum(2.0) + histogram_fake.SetMaximum(2.0) + + histogram_true.SetLineColor(2) + histogram_fake.SetLineColor(3) + + histogram_true.SetMarkerColor(2) + histogram_fake.SetMarkerColor(3) + + histogram_true.SetMarkerStyle(20) + histogram_fake.SetMarkerStyle(21) + + histogram_true.SetMarkerSize(0.7) + histogram_fake.SetMarkerSize(0.7) + + histogram_true.SetFillColor(2) + histogram_fake.SetFillColor(3) + + histogram_true.SetFillStyle(3004) + histogram_fake.SetFillStyle(3005) + + + xAxis_top = histogram_true.GetXaxis() + xAxis_top.SetTitle("Tau p_{T} (GeV)"); + xAxis_top.SetTitleOffset(1.2); + xAxis_top.SetTitleSize(0.03) + xAxis_top.SetLabelSize(0.03) + + + yAxis_top = histogram_true.GetYaxis() + yAxis_top.SetTitle("Data/MC SF") + yAxis_top.SetTitleOffset(1.2) + yAxis_top.SetTitleSize(0.03) + yAxis_top.SetLabelSize(0.03) + yAxis_top.SetTickLength(0.04) + + legendTextSize = 0.040 + legendPosX = 0.740 + legendPosY = 0.510 + legendSizeX = 0.190 + legendSizeY = 0.420 + + legend = ROOT.TLegend(0.7, 0.7, 0.85, 0.85, "", "brNDC") + legend.SetFillStyle(0) + legend.SetFillColor(10) + legend.SetTextSize(0.018) + + legend.AddEntry(histogram_true, "True taus", "f") + legend.AddEntry(histogram_fake, "Fake taus", "f") + + histogram_true.GetXaxis().SetRangeUser(20.,200.) + histogram_fake.GetXaxis().SetRangeUser(20.,200.) 
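# The 'E2' draw option used just below renders each histogram bin as a filled box spanning
# bin content +/- bin error, so the true-tau and fake-tau scale factors appear as shaded
# uncertainty bands; the different SetFillStyle hatch patterns keep the two bands readable
# where they overlap. A minimal standalone sketch of the same idiom with a dummy, flat SF
# histogram (placeholder values, not taken from any measurement):
import ROOT
ROOT.gROOT.SetBatch(True)
h_band_example = ROOT.TH1F('h_band_example', ';Tau p_{T} (GeV);Data/MC SF', 9, 20., 200.)
for b in range(1, h_band_example.GetNbinsX() + 1):
    h_band_example.SetBinContent(b, 1.0)   # dummy central value
    h_band_example.SetBinError(b, 0.05)    # dummy uncertainty -> half-width of the band
h_band_example.SetFillColor(ROOT.kRed)
h_band_example.SetFillStyle(3004)
c_band_example = ROOT.TCanvas('c_band_example', '', 800, 600)
h_band_example.Draw('E2')
c_band_example.Print('band_example.pdf')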
+ + histogram_true.GetYaxis().SetRangeUser(0., 1.4) + histogram_fake.GetYaxis().SetRangeUser(0., 1.4) + + histogram_true.Draw('E2') + histogram_fake.Draw('E2 same') + + legend.Draw() + + canvas.Update() + canvas.Print(outputFileName + ".pdf") + canvas.Print(outputFileName + ".png") + canvas.Print(outputFileName + ".root") + + +def makePlot_OldSFs(histo_dict, outputFileName): + canvasSizeX = 800 + canvasSizeY = 900 + + canvas = ROOT.TCanvas("canvas", "", canvasSizeX, canvasSizeY) + canvas.SetFillColor(10) + canvas.SetFillStyle(4000) + canvas.SetFillColor(10) + canvas.SetTicky() + canvas.SetBorderSize(2) + canvas.SetLeftMargin(0.12) + canvas.SetBottomMargin(0.12) + + canvas.cd() + + #histogram_old = histo_dict['old'] + histogram_new1 = histo_dict['sub_from_data'] + histogram_new2 = histo_dict['add_to_dy_mc'] + + #histogram_old.SetMaximum(2.0) + histogram_new1.SetMaximum(2.0) + histogram_new2.SetMaximum(2.0) + + + #histogram_old.SetLineColor(2) + histogram_new1.SetLineColor(3) + histogram_new2.SetLineColor(4) + + #histogram_old.SetMarkerColor(2) + histogram_new1.SetMarkerColor(3) + histogram_new2.SetMarkerColor(4) + + #histogram_old.SetMarkerStyle(20) + histogram_new1.SetMarkerStyle(21) + histogram_new2.SetMarkerStyle(22) + + #histogram_old.SetMarkerSize(0.7) + histogram_new1.SetMarkerSize(0.7) + histogram_new2.SetMarkerSize(0.7) + + #histogram_old.SetFillColor(2) + histogram_new1.SetFillColor(3) + histogram_new2.SetFillColor(4) + + #histogram_old.SetFillStyle(3004) + histogram_new1.SetFillStyle(3005) + histogram_new2.SetFillStyle(3002) + + #xAxis_top = histogram_old.GetXaxis() + xAxis_top = histogram_new1.GetXaxis() + xAxis_top.SetTitle("Tau p_{T} (GeV)"); + xAxis_top.SetTitleOffset(1.2); + xAxis_top.SetTitleSize(0.03) + xAxis_top.SetLabelSize(0.03) + + + #yAxis_top = histogram_old.GetYaxis() + yAxis_top = histogram_new1.GetYaxis() + yAxis_top.SetTitle("Data/MC SF") + yAxis_top.SetTitleOffset(1.2) + yAxis_top.SetTitleSize(0.03) + yAxis_top.SetLabelSize(0.03) + yAxis_top.SetTickLength(0.04) + + legendTextSize = 0.040 + legendPosX = 0.740 + legendPosY = 0.510 + legendSizeX = 0.190 + legendSizeY = 0.420 + + legend = ROOT.TLegend(0.7, 0.7, 0.85, 0.85, "", "brNDC") + legend.SetFillStyle(0) + legend.SetFillColor(10) + legend.SetTextSize(0.018) + #legend.AddEntry(histogram_old, "old", "f") + legend.AddEntry(histogram_new1, "(Data - Bg)/ZTT", "f") + legend.AddEntry(histogram_new2, "Data/(ZTT + Bg)", "f") + + #histogram_old.GetXaxis().SetRangeUser(20.,200.) ## Konstantin's old plots go till 1000 GeV but ours only till 200 GeV + histogram_new1.GetXaxis().SetRangeUser(20.,200.) + histogram_new2.GetXaxis().SetRangeUser(20.,200.)
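# As the legend labels above indicate, the two curves correspond to two conventions for turning the
# fitted turn-ons into a data/MC scale factor: either the non-ZTT background is subtracted from data
# before dividing by the Z->tautau expectation, or it is added to the Z->tautau expectation in the
# denominator. A small numeric illustration with made-up yields (not taken from any fit):
data_yield, ztt_yield, bkg_yield = 1000.0, 900.0, 150.0
sf_sub_from_data = (data_yield - bkg_yield) / ztt_yield   # "(Data - Bg)/ZTT"
sf_add_to_dy_mc = data_yield / (ztt_yield + bkg_yield)    # "Data/(ZTT + Bg)"
print(round(sf_sub_from_data, 3), round(sf_add_to_dy_mc, 3))   # 0.944 vs 0.952 -- the two agree only when Bg is negligible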
+ + #histogram_old.GetYaxis().SetRangeUser(0., 1.4) + histogram_new1.GetYaxis().SetRangeUser(0., 1.4) + histogram_new2.GetYaxis().SetRangeUser(0., 1.4) + + ## --- PLOTTING ON THE SAME CANVAS ---## + #histogram_old.Draw('E2') + histogram_new1.Draw('E2 same') + histogram_new2.Draw('E2 same') + + legend.Draw() + + canvas.Update() + canvas.Print(outputFileName + ".pdf") + canvas.Print(outputFileName + ".png") + canvas.Print(outputFileName + ".root") + + +for era in Eras: + #FullInputFilePath_old = "{}/{}_tauTriggerEff_DeepTau2017v2p1.root".format(InputFilePath_old, era) + FullInputFilePath_new_sub_from_data = '{}/turn_on_{}_subtract-from-data_fitted_LATEST.root'.format(InputFilePath_new, era) + FullInputFilePath_new_add_to_dy_mc = '{}/turn_on_{}_add-to-dy-mc_fitted_LATEST.root'.format(InputFilePath_new, era) + FullInputFilePath_new_TriggerSFs = '{}/NewTriggerSFs_{}.root'.format(InputFilePath_new, era) + #f_old = ROOT.TFile.Open(FullInputFilePath_old, "READ") + f_new_sub_from_data = ROOT.TFile.Open(FullInputFilePath_new_sub_from_data, "READ") + f_new_add_to_dy_mc = ROOT.TFile.Open(FullInputFilePath_new_add_to_dy_mc, "READ") + f_new_TriggerSFs = ROOT.TFile.Open(FullInputFilePath_new_TriggerSFs, "READ") + for dm in Decay_modes: + for chn in Channels: + for wp in Working_points: + histo_dict_OldSFs = {} + histo_dict_NewSFs = {} + histoName = "sf_{}_{}_dm{}_fitted".format(chn, wp, dm) + histoName_sf_true = "{}_{}_dm{}_fitted_sf_true".format(chn, wp, dm) + histoName_sf_fake = "{}_{}_dm{}_fitted_sf_fake".format(chn, wp, dm) + #h_old = f_old.Get(histoName) + h_new_sub_from_data = f_new_sub_from_data.Get(histoName) + h_new_add_to_dy_mc = f_new_add_to_dy_mc.Get(histoName) + h_sf_true = f_new_TriggerSFs.Get(histoName_sf_true) + h_sf_fake = f_new_TriggerSFs.Get(histoName_sf_fake) + outFileName_OldSFs = "{}/sf_{}_{}_{}_dm{}_fitted".format(OutputFilePath, era, chn, wp, dm) + outFileName_NewSFs = "{}/New_sf_{}_{}_{}_dm{}_fitted".format(OutputFilePath, era, chn, wp, dm) + #histo_dict_OldSFs["old"] = h_old + histo_dict_OldSFs["sub_from_data"] = h_new_sub_from_data + histo_dict_OldSFs["add_to_dy_mc"] = h_new_add_to_dy_mc + histo_dict_NewSFs["sf_true"] = h_sf_true + histo_dict_NewSFs["sf_fake"] = h_sf_fake + makePlot_OldSFs(histo_dict_OldSFs, outFileName_OldSFs) + makePlot_NewSFs(histo_dict_NewSFs, outFileName_NewSFs) + #f_old.Close() + f_new_sub_from_data.Close() + f_new_add_to_dy_mc.Close() diff --git a/TauTagAndProbe/python/compareTurnOn.py b/TauTagAndProbe/python/compareTurnOn.py new file mode 100644 index 00000000000..e328865f8a9 --- /dev/null +++ b/TauTagAndProbe/python/compareTurnOn.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python + +import argparse +from array import array +import math +import numpy as np +import re +import sys +import ROOT + +parser = argparse.ArgumentParser(description='Compare turn-on curves.') +parser.add_argument('--input', required=True, type=str, nargs='+', help="input file(s)") +parser.add_argument('--labels', required=True, type=str, help="comma-separated labels, one per input") +parser.add_argument('--pattern', required=True, type=str, help="trigger name pattern") +parser.add_argument('--selection', required=True, type=str, help="tau selection") +parser.add_argument('--output', required=True, type=str, help="output file prefix") +parser.add_argument('--vars', required=True, type=str, help="variables to draw") +parser.add_argument('--max-pt', required=False, type=float, default=None, help="max tau pt") +parser.add_argument('--max-gen-pt', required=False, type=float, default=None, help="max visible gen 
tau pt") +parser.add_argument('--min-hlt-pt', required=False, type=float, default=None, + help="minimal pt requiremet for the best matched HLT object to consider that the path is passed") +parser.add_argument('--deltaRThr', required=False, type=float, default=0.5, help="delta R threshold for HLT matching") +parser.add_argument('--min-l1-pt', required=False, type=float, default=None, + help="minimal pt requiremet for the best matched L1 object to consider that the path is passed") +args = parser.parse_args() + +sys.path.insert(0, 'Common/python') +from AnalysisTypes import * +from AnalysisTools import * +import RootPlotting +import TriggerConfig +ROOT.gROOT.SetBatch(True) +ROOT.TH1.SetDefaultSumw2() +RootPlotting.ApplyDefaultGlobalStyle() + +ccp_methods = ''' +int FindBestMatchedHLTObject(float tau_eta, float tau_phi, ULong64_t match_mask, float deltaRThr, + const ROOT::VecOps::RVec& hltObj_eta, const ROOT::VecOps::RVec& hltObj_phi, + const ROOT::VecOps::RVec& hltObj_hasPathName, + const ROOT::VecOps::RVec& hltObj_hasFilters_2) +{ + int best_match_index = -1; + float best_deltaR2 = std::pow(deltaRThr, 2); + for(size_t n = 0; n < hltObj_eta.size(); ++n) { + //if((match_mask & hltObj_hasPathName.at(n) & hltObj_hasFilters_2.at(n)) == 0) continue; + if((match_mask & hltObj_hasPathName.at(n)) == 0) continue; + const float deta = tau_eta - hltObj_eta.at(n); + const float dphi = ROOT::Math::VectorUtil::Phi_mpi_pi(tau_phi - hltObj_phi.at(n)); + const float deltaR2 = std::pow(deta, 2) + std::pow(dphi, 2); + if(deltaR2 >= best_deltaR2) continue; + best_match_index = static_cast(n); + best_deltaR2 = deltaR2; + } + return best_match_index; +} +''' +ROOT.gInterpreter.Declare(ccp_methods) + +def ReportHLTPaths(hlt_paths, label): + if len(hlt_paths) == 0: + raise RuntimeError("No HLT path match the pattern for {}".format(label)) + line = 'HLT path for {}:'.format(label) + for name in hlt_paths: + line += ' {}'.format(name) + print(line) + +def CreateBins(var_name, max_tau_pt, max_gen_pt): + if var_name in [ 'tau_pt', 'tau_gen_vis_pt' ]: + max_pt = 200 + if var_name == 'tau_pt' and max_tau_pt is not None: + max_pt = max(max_tau_pt, 100) + if var_name == 'tau_gen_vis_pt' and max_gen_pt is not None: + max_pt = max(max_gen_pt, 100) + bins = np.arange(10, 100, step=10) + high_pt_bins = [ 100, 150, 200, 300, 400, 500, 650, 800, 1000 ] + n = 0 + while n < len(high_pt_bins) and high_pt_bins[n] < max_pt: + n += 1 + use_logx = max_pt > 200 + return np.append(bins, high_pt_bins[0:n+1]), use_logx, True + elif var_name in [ 'tau_eta', 'tau_gen_vis_eta' ]: + return np.linspace(-2.3, 2.3, 20), False, False + elif var_name in [ 'npu', 'npv' ]: + return np.linspace(0, 80, 20), False, False + raise RuntimeError("Can't find binning for \"{}\"".format(var_name)) + +def GetTitle(var_name, axis_title=False): + titles = { + 'tau_pt': ('#tau p_{T}', '#tau p_{T} (GeV)'), + 'tau_gen_vis_pt': ('visible gen #tau p_{T}', 'visible gen #tau p_{T} (GeV)'), + 'tau_eta': ('#tau #eta', ), + 'tau_gen_vis_eta': ('visible gen #tau #eta', ), + 'npv': ('Number of reconstructed PV', '# reco PV'), + 'npu': ('Number of generated PV', '# gen PV'), + } + if var_name in titles: + index = min(int(axis_title), len(titles[var_name]) - 1) + return titles[var_name][index] + return var_name + +def CreateHistograms(input_file, selection_id, hlt_paths, label, vars, hist_models, output_file): + df = ROOT.RDataFrame('events', input_file) + df = df.Filter('(tau_sel & {}) != 0 && abs(tau_eta) < 2.3'.format(selection_id)) + if args.max_gen_pt is not None: + df = 
df.Filter('tau_gen_vis_pt < {}'.format(args.max_gen_pt)) + match_mask = 0 + for path_name, path_index in hlt_paths.items(): + match_mask = match_mask | (1 << path_index) + if selection_id == TauSelection.gen: + tau_eta_branch = 'tau_gen_vis_eta' + tau_phi_branch = 'tau_gen_vis_phi' + else: + tau_eta_branch = 'tau_eta' + tau_phi_branch = 'tau_phi' + df_passed = df.Filter('(hlt_accept & {}ULL) != 0'.format(match_mask)) \ + .Define('matched_hlt_idx', + '''FindBestMatchedHLTObject({}, {}, {}ULL, {}, hltObj_eta, hltObj_phi, + hltObj_hasPathName, hltObj_hasFilters_2)''' \ + .format(tau_eta_branch, tau_phi_branch, match_mask, args.deltaRThr)) \ + .Filter('matched_hlt_idx >= 0') + + if args.min_hlt_pt is not None: + df_passed = df_passed.Filter('hltObj_pt.at(matched_hlt_idx) > {}'.format(args.min_hlt_pt)) + if args.min_l1_pt is not None: + df_passed = df_passed.Filter('l1Tau_pt > {}'.format(args.min_l1_pt)) + hist_total = {} + hist_passed = {} + for var_id in range(len(vars)): + var = vars[var_id] + hist_total[var] = df.Histo1D(hist_models[var], var) + hist_passed[var] = df_passed.Histo1D(hist_models[var], var) + + eff = {} + for var_id in range(len(vars)): + var = vars[var_id] + eff[var] = ROOT.TEfficiency(hist_passed[var].GetPtr(), hist_total[var].GetPtr()) + eff[var].SetStatisticOption(ROOT.TEfficiency.kFCP) + output_file.WriteTObject(hist_total[var].GetPtr(), '{}_total_{}'.format(label, var), 'Overwrite') + output_file.WriteTObject(hist_passed[var].GetPtr(), '{}_passed_{}'.format(label, var), 'Overwrite') + output_file.WriteTObject(eff[var], '{}_eff_{}'.format(label, var), 'Overwrite') + return hist_passed, hist_total, eff + +selection_id = ParseEnum(TauSelection, args.selection) +print('Tau selection: {}'.format(args.selection)) + +n_inputs = len(args.input) +labels = args.labels.split(',') +vars = args.vars.split(',') + +if len(labels) != n_inputs: + raise RuntimeError("Inconsitent number of inputs = {} and number of labels = {}".format(n_inputs, len(labels))) + +trigger_dict = [None] * n_inputs +hlt_paths = [None] * n_inputs +for input_id in range(n_inputs): + trigger_dict[input_id] = TriggerConfig.LoadTriggerDictionary(args.input[input_id]) + hlt_paths[input_id] = TriggerConfig.GetMatchedTriggers(trigger_dict[input_id], args.pattern) + ReportHLTPaths(hlt_paths[input_id], labels[input_id]) + +output_file = ROOT.TFile(args.output + '.root', 'RECREATE') + +bins = {} +x_scales = {} +divide_by_bw = {} +hist_models = {} +for var_id in range(len(vars)): + var = vars[var_id] + bins[var], x_scales[var], divide_by_bw[var] = CreateBins(var, args.max_pt, args.max_gen_pt) + hist_models[var] = ROOT.RDF.TH1DModel(var, var, len(bins[var]) - 1, array('d', bins[var])) + +hist_passed = [None] * n_inputs +hist_total = [None] * n_inputs +eff = [None] * n_inputs + +for input_id in range(n_inputs): + hist_passed[input_id], hist_total[input_id], eff[input_id] = \ + CreateHistograms(args.input[input_id], selection_id, hlt_paths[input_id], labels[input_id], vars, hist_models, + output_file) + +colors = [ROOT.kBlue, ROOT.kRed] +canvas = RootPlotting.CreateCanvas() + +target_names = [ 'passed', 'total', 'efficiency' ] +is_efficiency = [ False, False, True] +targets = [ hist_passed, hist_total, eff ] + +n_plots = len(vars) * len(targets) +plot_id = 0 +for var_id in range(len(vars)): + var = vars[var_id] + for target_id in range(len(targets)): + ratio_graphs = {} + ref_hist = hist_models[var].GetHistogram() + ratio_ref_hist = ref_hist.Clone() + y_title = 'Efficiency' if is_efficiency[target_id] else 'arb.' 
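+        # one curve per input; non-efficiency histograms are normalised to unit area below,
+        # so only the shapes are compared between inputs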
+ curves = [None] * n_inputs + for input_id in range(n_inputs): + curve = targets[target_id][input_id][var] + if 'RResultPtr' in str(type(curve)): + curve = curve.GetPtr() + if not is_efficiency[target_id]: + curve = curve.Clone() + curve.Scale(1. / curve.Integral()) + if divide_by_bw[var]: + RootPlotting.DivideByBinWidth(curve) + curves[input_id] = curve + if is_efficiency[target_id]: + y_min, y_max = (0, 1) + else: + y_min = 0 + _, y_max = RootPlotting.GetYRange(curves) + + title = '{}: {}'.format(GetTitle(var, False), target_names[target_id]) + plain_title = '{}_{}'.format(var, target_names[target_id]) + main_pad, ratio_pad, title_controls = RootPlotting.CreateTwoPadLayout(canvas, ref_hist, ratio_ref_hist, + log_x=x_scales[var], title=title) + RootPlotting.ApplyAxisSetup(ref_hist, ratio_ref_hist, x_title=GetTitle(var, True), y_title=y_title, + ratio_y_title='Ratio', y_range=(y_min, y_max * 1.1), max_ratio=1.5) + legend = RootPlotting.CreateLegend(pos=(0.18, 0.78), size=(0.2, 0.15)) + for input_id in range(n_inputs): + curve = curves[input_id] + curve.Draw('SAME') + RootPlotting.ApplyDefaultLineStyle(curve, colors[input_id]) + legend.AddEntry(curve, labels[input_id], 'PLE') + + if input_id < n_inputs - 1: + if is_efficiency[target_id]: + ratio_graphs[var] = RootPlotting.CreateEfficiencyRatioGraph(hist_passed[input_id][var], + hist_total[input_id][var], + hist_passed[-1][var], + hist_total[-1][var]) + else: + ratio_hist = curve.Clone() + ratio_hist.Divide(curves[-1]) + ratio_graphs[var] = RootPlotting.HistogramToGraph(ratio_hist) + if ratio_graphs[var]: + output_file.WriteTObject(ratio_graphs[var], + '{}_ratio_{}_{}'.format(var, labels[input_id], labels[-1]), + 'Overwrite') + ratio_pad.cd() + ratio_color = colors[input_id] if n_inputs > 2 else ROOT.kBlack + RootPlotting.ApplyDefaultLineStyle(ratio_graphs[var], ratio_color) + ratio_graphs[var].Draw("0PE SAME") + main_pad.cd() + legend.Draw() + + canvas.Update() + output_file.WriteTObject(canvas, '{}_canvas'.format(plain_title), 'Overwrite') + RootPlotting.PrintAndClear(canvas, args.output + '.pdf', plain_title, plot_id, n_plots, [main_pad, ratio_pad]) + plot_id += 1 +output_file.Close() diff --git a/TauTagAndProbe/python/computeTriggerSFs.py b/TauTagAndProbe/python/computeTriggerSFs.py new file mode 100644 index 00000000000..d960eadf519 --- /dev/null +++ b/TauTagAndProbe/python/computeTriggerSFs.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python + +import argparse +from array import array +import math +import numpy as np +import os +import re +import sys +import ROOT + +parser = argparse.ArgumentParser(description='Estimate QCD backgrounds.') +parser.add_argument('--input_stage2p5', required=True, type=str, default='', help="input Stage 2.5 file") +parser.add_argument('--input_signal', required=True, type=str, default='', help="input Signal region root file with fitted turn ons") +parser.add_argument('--input_w_enriched', required=True, type=str, default='', help="input W-enriched region root file w fitted turn ons") +parser.add_argument('--channels', required=False, type=str, default='etau,mutau,ditau', help="channels to process") +parser.add_argument('--decay-modes', required=False, type=str, default='all,0,1,10,11', help="decay modes to process") +parser.add_argument('--working-points', required=False, type=str, + default='VVVLoose,VVLoose,VLoose,Loose,Medium,Tight,VTight,VVTight', + help="working points to process") +parser.add_argument('--output', required=True, type=str, help="output file name") +args = parser.parse_args() + +if 
((args.input_stage2p5 == '') or (args.input_signal == '') or (args.input_w_enriched == '')): + raise ValueError("One or more of these configuration parameters are Invalid = '%s', '%s', '%s' !!" % (args.input_stage2p5, args.input_signal, args.input_w_enriched)) + +path_prefix = '' if 'TauTriggerTools' in os.getcwd() else 'TauTriggerTools/' +sys.path.insert(0, path_prefix + 'Common/python') +from AnalysisTypes import * +from AnalysisTools import * +import RootPlotting +ROOT.ROOT.EnableImplicitMT(4) +ROOT.gROOT.SetBatch(True) +ROOT.TH1.SetDefaultSumw2() # CV: This does not seem to work... all bin-error are the exact square-root of the bin-contents !! +ROOT.gInterpreter.Declare('#include "{}TauTagAndProbe/interface/PyInterface.h"'.format(path_prefix)) +RootPlotting.ApplyDefaultGlobalStyle() + +#---------------------------------------------------------------------------------------------------- +# define integer constants +type_data = 0 +type_ztt_mc = 1 +type_zmm_mc = 2 +type_w_mc = 3 +type_ttbar_mc = 4 + +selection_OS_low_mT = 0 +selection_OS_high_mT = 1 +selection_SS_low_mT = 2 +selection_SS_high_mT = 3 +selection_signal = 4 +selection_w_enriched = 5 +#---------------------------------------------------------------------------------------------------- + +def get_type(process): + if process == "data": + return type_data + elif process == "ztt-mc": + return type_ztt_mc + elif process == "zmm-mc": + return type_zmm_mc + elif process == "w-mc": + return type_w_mc + elif process == "ttbar-mc": + return type_ttbar_mc + else: + raise ValueError("Invalid function argument: process = '%s' !!" % process) +#---------------------------------------------------------------------------------------------------- + +def dumpHistogram(histogram, histogramName): + print("histogram: ", histogramName) + for idxBin in range(histogram.GetNbinsX()): + print(" bin #%i: bin-center = %1.2f: bin-content = %1.2f +/- %1.2f" % (idxBin + 1, histogram.GetXaxis().GetBinCenter(idxBin + 1), histogram.GetBinContent(idxBin + 1), histogram.GetBinError(idxBin + 1))) + +def makeBinContentsPositive(histogramName, histogram): + print(": histogram = %s" % histogramName) + integral_original = histogram.Integral() + for i in range(histogram.GetNbinsX()): + binContent = histogram.GetBinContent(i + 1) + binError = histogram.GetBinError(i + 1) + if binContent < 0.: + histogram.SetBinContent(i + 1, 0.) + histogram.SetBinError(i + 1, math.sqrt(binContent**2 + binError**2)) + integral_modified = histogram.Integral() + print("integral: original = %1.2f, modified = %1.2f" % (integral_original, integral_modified)) + if integral_modified > 0.: + histogram.Scale(integral_original/integral_modified) + +def extractBinEdges(histogramName, histogram): + print(": histogram = %s" % histogramName) + print(" histogram.GetNbinsX() ", histogram.GetNbinsX()) + bin_list = [0.] 
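+    # collect the low edge of every bin; note that the list is seeded with 0. and the
+    # upper edge of the last bin is not appended here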
+ for i in range(histogram.GetNbinsX()): + hist_BinLowEdge = histogram.GetBinLowEdge(i + 1) + bin_list.append(hist_BinLowEdge) + bin_array = np.array(bin_list) + #print("bin_array ", bin_array) + return bin_array + + +class AlphaInfo: + def __init__(self): + self.hist_alpha = None + self.hist_one_minus_alpha = None + self.eff = None + +def IncreaseBinning(alpha_orig, AlphaInfo_Obj, var, bins): + alpha = ROOT.TH1F("alpha", var, len(bins) - 1, array('d', bins)) + one_minus_alpha = alpha.Clone() + for n in range(1, alpha.GetNbinsX() + 1): + x = alpha.GetBinCenter(n) + bin_orig = alpha_orig.FindFixBin(x) + alpha.SetBinContent(n, alpha_orig.GetBinContent(bin_orig)) + alpha.SetBinError(n, alpha_orig.GetBinError(bin_orig)) + one_minus_alpha.SetBinContent(n, 1 - alpha_orig.GetBinContent(bin_orig)) + one_minus_alpha.SetBinError(n, alpha_orig.GetBinError(bin_orig)) + AlphaInfo_Obj.hist_alpha = alpha + AlphaInfo_Obj.hist_one_minus_alpha = one_minus_alpha + +def ComputeQCD(df, hist_model, var, branchname_weight, sf_qcd_SS_to_OS = 1.0): + df_SS = df.Filter("selection == {}".format(selection_SS_low_mT)) + hist_qcd = (df_SS.Filter("type == {}".format(type_data))).Histo1D(hist_model, var, branchname_weight) + hist_mc = (df_SS.Filter(''' + type == {} or type == {} or type == {} or type == {} + '''.format(type_ztt_mc, type_zmm_mc, type_w_mc, type_ttbar_mc))).Histo1D(hist_model, var, branchname_weight) + print("SS Data :", hist_qcd.Integral()) + print("SS MC :", hist_mc.Integral()) + #print("type(hist_qcd) :", type(hist_qcd)) + #print("type(hist_mc) :", type(hist_mc)) + hist_qcd.Add(hist_mc.GetPtr(), -1) + hist_qcd.Scale(sf_qcd_SS_to_OS) + print("QCD yield computed :", hist_qcd.Integral()) + if(hist_qcd.Integral() < 0.): + print("Data driven QCD yield is -ve, setting it to zero by hand") + hist_qcd.Reset() + #makeBinContentsPositive("hist_qcd", hist_qcd) ## Fix for negative bins + print("QCD final yield :", hist_qcd.Integral()) + return hist_qcd + +def Compute_eff_data_true(AlphaInfo_Obj, eff_data_sgn, eff_data_fake): + eff_data_true_tmp = eff_data_fake.Clone() + eff_data_true = eff_data_fake.Clone() + eff_data_true.Reset() + one_minus_alpha = AlphaInfo_Obj.hist_one_minus_alpha.Clone() + alpha = AlphaInfo_Obj.hist_alpha.Clone() + eff_data_true.Multiply(eff_data_true_tmp, one_minus_alpha, 1, -1) + eff_data_true.Add(eff_data_sgn) + eff_data_true.Divide(alpha) + eff_data_true.SetName("eff_data_true") + return eff_data_true + +def ComputeSF(input_files, output_file, + branchname_weight, channels, + decay_modes, discr_name, + working_points, + var, sf_qcd_SS_to_OS): + print(":") + print("Stage 2.5 input_file = '%s'" % input_files[0]) + df = ROOT.RDataFrame('events', input_files[0]) + print("Fitted TurnOn signal region input file = '%s'" % input_files[1]) + file_signal = ROOT.TFile(input_files[1], "READ") + print("Fitted TurnOn w-enriched region input file = '%s'" % input_files[2]) + file_w_enriched = ROOT.TFile(input_files[2], "READ") + + dm_labels = {} + turnon_label = "" + for dm in decay_modes: + print("decay mode :", dm) + if dm == 'all': + dm_labels[dm] = '' + df_dm = df + else: + dm_labels[dm] = '_dm{}'.format(dm) + df_dm = df.Filter('tau_decayMode == {}'.format(dm)) + for wp in working_points: + print("Working point :", wp) + wp_bit = ParseEnum(DiscriminatorWP, wp) + df_wp = df_dm.Filter('({} & (1 << {})) != 0'.format(discr_name, wp_bit)) + for channel in channels: + print("channel :", channel) + turnon_label = "{}_{}_dm{}_fitted".format(channel, wp, dm) + print("turnon_label: ", turnon_label) + bin_list = 
[20., 30., 40., 50., 60., 80., 100., 1000.] ## Konstantin's proposed binning for alpha_orig + bins = np.array(bin_list) + hist_models = { + 'plot': ROOT.RDF.TH1DModel(var, var, len(bins) - 1, array('d', bins)) + } + df_ch = df_wp.Filter('pass_{} > 0.5'.format(channel)) + for model_name, hist_model in hist_models.items(): + A = AlphaInfo() + hist_qcd = ComputeQCD(df_ch, hist_model, var, branchname_weight, sf_qcd_SS_to_OS) ## TO BE IMPLEMENTED + hist_qcd_clone = hist_qcd.Clone() + dumpHistogram(hist_qcd_clone, "hist_qcd") + hist_signal = (df_ch.Filter(''' + selection == {} && type == {} + '''.format(selection_signal, type_ztt_mc))).Histo1D(hist_model, var, branchname_weight) + hist_mc_bkg = (df_ch.Filter(''' + selection == {} && ((type == {}) or (type == {}) or (type == {})) + '''.format(selection_signal, type_w_mc, type_ttbar_mc, type_zmm_mc))).Histo1D(hist_model, var, branchname_weight) + hist_total = hist_mc_bkg.Clone() ## W_mc + TT_mc + Zmm_mc + print("hist_mc_bkg = (W_mc + TT_mc + Zmm_mc) : ", hist_total.Integral()) + #dumpHistogram(hist_total, "hist_mc_bkg") + #print("type(hist_qcd) :", type(hist_qcd)) + #print("type(hist_total) :", type(hist_total)) + hist_total.Add(hist_qcd.GetPtr()) ## Total Bg = W_mc + TT_mc + Zmm_mc + QCD + hist_total_bkg = hist_total.Clone() + print("hist_total = (W_mc + TT_mc + Zmm_mc + QCD) : ", hist_total.Integral()) + #dumpHistogram(hist_total, "hist_mc_bkg + QCD") + alpha_orig = hist_signal.Clone() ## Numerator for alpha_orig = Signal = Ztt_mc + print("hist_signal = Ztt_mc : ", hist_signal.Integral()) + #dumpHistogram(alpha_orig, "hist_signal") + hist_total.Add(hist_signal.GetPtr()) ## Denominator for alpha_orig = Signal + Total Bg + print("hist_total = (W_mc + TT_mc + Zmm_mc + QCD) + Ztt_mc : ", hist_total.Integral()) + #print("type(alpha_orig) :", type(alpha_orig)) + #print("type(hist_total) :", type(hist_total)) + #dumpHistogram(hist_total, "hist_total") + alpha_orig.Divide(hist_total) ## alpha_orig = Signal/(Signal + Total Bg) + #hist_signal.Divide(hist_total.GetPtr()) ## Signal/(Signal + Total Bg) + #dumpHistogram(alpha_orig, "alpha_orig") + + mc_turnon_label = "mc_" + turnon_label + data_turnon_label = "data_" + turnon_label + + eff_mc_true = file_signal.Get(mc_turnon_label) + eff_mc_true.SetName("eff_mc_true") + eff_mc_true_orig = eff_mc_true.Clone() + + eff_data_sgn = file_signal.Get(data_turnon_label) + eff_data_sgn.SetName("eff_data_sgn") + eff_data_sgn_orig = eff_data_sgn.Clone() + #dumpHistogram(eff_data_sgn_orig, "eff_data_sgn_orig") + + eff_mc_fake = file_w_enriched.Get(mc_turnon_label) + #print("type(eff_mc_fake) ", type(eff_mc_fake)) + #print("eff_mc_fake.Integral() ", eff_mc_fake.Integral()) + eff_mc_fake.SetName("eff_mc_fake") + eff_mc_fake_orig = eff_mc_fake.Clone() + + eff_data_fake = file_w_enriched.Get(data_turnon_label) + eff_data_fake.SetName("eff_data_fake") + eff_data_fake_orig = eff_data_fake.Clone() + #dumpHistogram(eff_data_fake_orig, "eff_data_fake") + + bins_w_enriched = extractBinEdges(eff_data_fake.GetName(), eff_data_fake) + bins_signal = extractBinEdges(eff_data_sgn.GetName(), eff_data_sgn) + if(np.array_equal(bins_signal,bins_w_enriched)): + print("Signal and W-enriched arrays are identical") + bins = bins_w_enriched + print("bins_signal ", bins_signal) + else: + raise ValueError("Signal and W-enriched arrays are not equal !!") + bins_alpha_orig = extractBinEdges("alpha_orig: ", alpha_orig) + print("bins_alpha_orig: ", bins_alpha_orig) + IncreaseBinning(alpha_orig, A, var, bins) + bins_alpha_final = 
extractBinEdges("A.hist_alpha: ", A.hist_alpha) + print("bins_alpha_final: ", bins_alpha_final) + #dumpHistogram(A.hist_alpha, "A.hist_alpha") + #dumpHistogram(A.hist_one_minus_alpha, "A.hist_one_minus_alpha") + eff_data_true = Compute_eff_data_true(A, eff_data_sgn, eff_data_fake) + eff_data_true_orig = eff_data_true.Clone() + #dumpHistogram(eff_data_true, "eff_data_true") + #dumpHistogram(eff_mc_true, "eff_mc_true") + eff_data_true.Divide(eff_mc_true) + eff_data_true.SetName("sf_true_taus") + #dumpHistogram(eff_data_true, "sf_true") + eff_data_fake.Divide(eff_mc_fake) + eff_data_fake.SetName("sf_fake_taus") + out_name_pattern = '{}_{{}}'.format(turnon_label) + output_file.WriteTObject(eff_data_true, out_name_pattern.format('sf_true'), 'Overwrite') + output_file.WriteTObject(eff_data_fake, out_name_pattern.format('sf_fake'), 'Overwrite') + output_file.WriteTObject(hist_signal.GetPtr(), out_name_pattern.format('hist_signal'), 'Overwrite') + output_file.WriteTObject(hist_mc_bkg.GetPtr(), out_name_pattern.format('hist_mc_bkg'), 'Overwrite') + output_file.WriteTObject(hist_qcd.GetPtr(), out_name_pattern.format('hist_qcd'), 'Overwrite') + output_file.WriteTObject(hist_total_bkg, out_name_pattern.format('hist_total_bkg'), 'Overwrite') + output_file.WriteTObject(hist_total, out_name_pattern.format('hist_total'), 'Overwrite') + output_file.WriteTObject(alpha_orig, out_name_pattern.format('alpha_orig'), 'Overwrite') + output_file.WriteTObject(A.hist_alpha, out_name_pattern.format('alpha'), 'Overwrite') + output_file.WriteTObject(A.hist_one_minus_alpha, out_name_pattern.format('one_minus_alpha'), 'Overwrite') + output_file.WriteTObject(eff_data_sgn_orig, out_name_pattern.format('eff_data_sgn'), 'Overwrite') + output_file.WriteTObject(eff_data_true_orig, out_name_pattern.format('eff_data_true'), 'Overwrite') + output_file.WriteTObject(eff_data_fake_orig, out_name_pattern.format('eff_data_fake'), 'Overwrite') + output_file.WriteTObject(eff_mc_true_orig, out_name_pattern.format('eff_mc_true'), 'Overwrite') + output_file.WriteTObject(eff_mc_fake_orig, out_name_pattern.format('eff_mc_fake'), 'Overwrite') + file_signal.Close() + file_w_enriched.Close() + output_file.Close() + print('All done.') + + +output_file = ROOT.TFile('{}.root'.format(args.output), 'RECREATE', '', ROOT.RCompressionSetting.EDefaults.kUseSmallest) +input_files = [args.input_stage2p5, args.input_signal, args.input_w_enriched] +n_inputs = len(input_files) - 2 +branchnames_weight = ['weight'] +labels = ['alpha' ] +idx_alpha = 0 +sf_qcd_SS_to_OS = 1.0 ## SS to OS extrapolation factor for QCD bg +var = 'tau_pt' +title, x_title = '#tau p_{T}', '#tau p_{T} (GeV)' +channels = args.channels.split(',') +decay_modes = args.decay_modes.split(',') +working_points = args.working_points.split(',') + +alpha = [ None ] * n_inputs ## List of dictionaries +for input_id in range(n_inputs): + print("Creating {} histograms...".format(labels[input_id])) + alpha[input_id] = ComputeSF(input_files, output_file, + branchnames_weight[input_id], channels, + decay_modes, 'byDeepTau2017v2p1VSjet', + working_points, var, sf_qcd_SS_to_OS) diff --git a/TauTagAndProbe/python/createTrunOn.py b/TauTagAndProbe/python/createTrunOn.py new file mode 100644 index 00000000000..bd6a987c7be --- /dev/null +++ b/TauTagAndProbe/python/createTrunOn.py @@ -0,0 +1,230 @@ +#!/usr/bin/env python + +import argparse +from array import array +import math +import numpy as np +import os +import re +import sys +import ROOT + +parser = argparse.ArgumentParser(description='Create turn on 
curves.') +parser.add_argument('--input-data', required=True, type=str, help="skimmed data input") +parser.add_argument('--input-dy-mc', required=True, type=str, help="skimmed DY MC input") +parser.add_argument('--output', required=True, type=str, help="output file prefix") +parser.add_argument('--channels', required=False, type=str, default='etau,mutau,ditau', help="channels to process") +parser.add_argument('--decay-modes', required=False, type=str, default='all,0,1,10,11', help="decay modes to process") +parser.add_argument('--working-points', required=False, type=str, + default='VVVLoose,VVLoose,VLoose,Loose,Medium,Tight,VTight,VVTight', + help="working points to process") +parser.add_argument('--branchname-weight-data', required=True, type=str, help="branchname for event weights for data input") +parser.add_argument('--branchname-weight-dy-mc', required=True, type=str, help="branchname for event weights for DY MC input") + +args = parser.parse_args() + +if not(args.branchname_weight_data == "weight" or args.branchname_weight_data == "final_weight"): + raise ValueError("Invalid configuration parameter branchname-weight-data = '%s' !!" % args.branchname_weight_data) +if not(args.branchname_weight_dy_mc == "weight" or args.branchname_weight_dy_mc == "final_weight"): + raise ValueError("Invalid configuration parameter branchname-weight-dy-mc = '%s' !!" % args.branchname_weight_dy_mc) + +path_prefix = '' if 'TauTriggerTools' in os.getcwd() else 'TauTriggerTools/' +sys.path.insert(0, path_prefix + 'Common/python') +from AnalysisTypes import * +from AnalysisTools import * +import RootPlotting +ROOT.ROOT.EnableImplicitMT(4) +ROOT.gROOT.SetBatch(True) +ROOT.TH1.SetDefaultSumw2() # CV: This does not seem to work... all bin-error are the exact square-root of the bin-contents !! 
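+# (note: SetDefaultSumw2 only affects histograms booked after this call; with unit event
+#  weights, bin errors equal to the square root of the bin contents are the expected behaviour)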
+RootPlotting.ApplyDefaultGlobalStyle() + +def CreateBins(max_pt, for_fitting): + bins = None + epsilon = 1.e-3 # CV: increase x-axis range a little bit, to make sure that upper limit (120 GeV) gets added to list of bin-edges + if for_fitting: + bins = np.arange(20, 40, step=1) + bins = np.append(bins, np.arange(40, 60, step=2)) + bins = np.append(bins, np.arange(60, 80, step=5)) + bins = np.append(bins, np.arange(80, 120, step=10)) + bins = np.append(bins, np.arange(120, 160, step=20)) + bins = np.append(bins, np.arange(160, 200+epsilon, step=40)) + else: + bins = np.arange(20, 40, step=1) + bins = np.append(bins, np.arange(40, 60, step=2)) + bins = np.append(bins, np.arange(60, 80, step=5)) + bins = np.append(bins, np.arange(80, 120, step=10)) + bins = np.append(bins, np.arange(120, 160, step=20)) + bins = np.append(bins, np.arange(160, 200+epsilon, step=40)) + use_logx = max_pt > 200+epsilon + return bins, use_logx + +class TurnOnData: + def __init__(self): + self.hist_total = None + self.hist_passed = None + self.eff = None + +##def dumpHistogram(histogram): +## for idxBin in range(histogram.GetNbinsX()): +## print(" bin #%i: bin-content = %1.2f +/- %1.2f" % (idxBin + 1, histogram.GetBinContent(idxBin + 1), histogram.GetBinError(idxBin + 1))) + +def CreateHistograms(input_file, branchname_weight, + channels, decay_modes, discr_name, + working_points, hist_models, label, var): + ##print(":") + ##print(" input_file = '%s'" % input_file) + ##print(" branchname_weight = '%s'" % branchname_weight) + df = ROOT.RDataFrame('events', input_file) + turnOn_data = {} + dm_labels = {} + + for dm in decay_modes: + if dm == 'all': + dm_labels[dm] = '' + df_dm = df + else: + dm_labels[dm] = '_dm{}'.format(dm) + df_dm = df.Filter('tau_decayMode == {}'.format(dm)) + turnOn_data[dm] = {} + for wp in working_points: + wp_bit = ParseEnum(DiscriminatorWP, wp) + df_wp = df_dm.Filter('({} & (1 << {})) != 0'.format(discr_name, wp_bit)) + turnOn_data[dm][wp] = {} + for channel in channels: + turnOn_data[dm][wp][channel] = {} + df_ch = df_wp.Filter('pass_{} > 0.5'.format(channel)) + for model_name, hist_model in hist_models.items(): + turn_on = TurnOnData() + turn_on.hist_total = df_wp.Histo1D(hist_model, var, branchname_weight) + ##print("hist_total:") + ##dumpHistogram(turn_on.hist_total) + turn_on.hist_passed = df_ch.Histo1D(hist_model, var, branchname_weight) + ##print("hist_passed:") + ##dumpHistogram(turn_on.hist_passed) + turnOn_data[dm][wp][channel][model_name] = turn_on + + return turnOn_data + +output_file = ROOT.TFile(args.output + '.root', 'RECREATE') +input_files = [ args.input_data, args.input_dy_mc ] +n_inputs = len(input_files) +branchnames_weight = [ args.branchname_weight_data, args.branchname_weight_dy_mc ] +labels = [ 'data', 'mc' ] +idx_data = 0 +idx_mc = 1 +var = 'tau_pt' +title, x_title = '#tau p_{T}', '#tau p_{T} (GeV)' +decay_modes = args.decay_modes.split(',') +channels = args.channels.split(',') +working_points = args.working_points.split(',') +bins, use_logx = CreateBins(200, False) +bins_fit, _ = CreateBins(200, True) +hist_models = { + 'plot': ROOT.RDF.TH1DModel(var, var, len(bins) - 1, array('d', bins)), + 'fit': ROOT.RDF.TH1DModel(var, var, len(bins_fit) - 1, array('d', bins_fit)) +} +turnOn = [ None ] * n_inputs +for input_id in range(n_inputs): + print("Creating {} histograms...".format(labels[input_id])) + turnOn[input_id] = CreateHistograms(input_files[input_id], branchnames_weight[input_id], + channels, decay_modes, 'byDeepTau2017v2p1VSjet', + working_points, hist_models, 
labels[input_id], var) +dm_labels = {} +for dm in decay_modes: + if dm == 'all': + dm_labels[dm] = '' + else: + dm_labels[dm] = '_dm{}'.format(dm) +for dm in decay_modes: + for wp in working_points: + for channel in channels: + print('Processing {} {} WP DM = {}'.format(channel, wp, dm)) + for model_name in hist_models.keys(): + turnOn_data = turnOn[idx_data][dm][wp][channel][model_name] + eff_data = None + turnOn_mc = turnOn[idx_mc][dm][wp][channel][model_name] + eff_mc = None + if 'fit' in model_name: + passed_data, total_data, eff_data, passed_mc, total_mc, eff_mc = AutoRebinAndEfficiency(turnOn_data.hist_passed.GetPtr(), + turnOn_data.hist_total.GetPtr(), + turnOn_mc.hist_passed.GetPtr(), + turnOn_mc.hist_total.GetPtr()) + #FixEfficiencyBins(passed_data, total_data) + #FixEfficiencyBins(passed_mc, total_mc) + else: + passed_data, total_data = turnOn_data.hist_passed.GetPtr(), turnOn_data.hist_total.GetPtr() + FixEfficiencyBins(passed_data, total_data) + eff_data = ROOT.TEfficiency(passed_data, total_data) + passed_mc, total_mc = turnOn_mc.hist_passed.GetPtr(), turnOn_mc.hist_total.GetPtr() + FixEfficiencyBins(passed_mc, total_mc) + eff_mc = ROOT.TEfficiency(passed_mc, total_mc) + name_pattern_data = '{}_{}_{}{}_{}_{{}}'.format(labels[idx_data], channel, wp, dm_labels[dm], model_name) + turnOn_data.name_pattern = name_pattern_data + output_file.WriteTObject(total_data, name_pattern_data.format('total'), 'Overwrite') + output_file.WriteTObject(passed_data, name_pattern_data.format('passed'), 'Overwrite') + output_file.WriteTObject(eff_data, name_pattern_data.format('eff'), 'Overwrite') + turnOn_data.eff = eff_data + name_pattern_mc = '{}_{}_{}{}_{}_{{}}'.format(labels[idx_mc], channel, wp, dm_labels[dm], model_name) + turnOn_mc.name_pattern = name_pattern_mc + output_file.WriteTObject(total_mc, name_pattern_mc.format('total'), 'Overwrite') + output_file.WriteTObject(passed_mc, name_pattern_mc.format('passed'), 'Overwrite') + output_file.WriteTObject(eff_mc, name_pattern_mc.format('eff'), 'Overwrite') + turnOn_mc.eff = eff_mc + +colors = [ ROOT.kRed, ROOT.kBlack ] +canvas = RootPlotting.CreateCanvas() + +n_plots = len(decay_modes) * len(channels) * len(working_points) +plot_id = 0 +for channel in channels: + for wp in working_points: + for dm in decay_modes: + if dm == 'all': + dm_label = '' + dm_plain_label = '' + else: + dm_label = ' DM={}'.format(dm) + dm_plain_label = '_dm{}'.format(dm) + ratio_graph = None + ref_hist = hist_models['plot'].GetHistogram() + ratio_ref_hist = ref_hist.Clone() + turnOns = [None] * n_inputs + curves = [None] * n_inputs + for input_id in range(n_inputs): + turnOns[input_id] = turnOn[input_id][dm][wp][channel]['plot'] + curves[input_id] = turnOns[input_id].eff + y_min, y_max = (0, 1) + y_title = 'Efficiency' + title = '{} {}{}'.format(channel, wp, dm_label) + plain_title = '{}_{}{}'.format(channel, wp, dm_plain_label) + main_pad, ratio_pad, title_controls = RootPlotting.CreateTwoPadLayout(canvas, ref_hist, ratio_ref_hist, + log_x=use_logx, title=title) + RootPlotting.ApplyAxisSetup(ref_hist, ratio_ref_hist, x_title=x_title, y_title=y_title, + ratio_y_title='Ratio', y_range=(y_min, y_max * 1.1), max_ratio=1.5) + legend = RootPlotting.CreateLegend(pos=(0.78, 0.28), size=(0.2, 0.15)) + for input_id in range(n_inputs): + curve = curves[input_id] + curve.Draw('SAME') + RootPlotting.ApplyDefaultLineStyle(curve, colors[input_id]) + legend.AddEntry(curve, labels[input_id], 'PLE') + + if input_id < n_inputs - 1: + ratio_graph = 
RootPlotting.CreateEfficiencyRatioGraph(turnOns[input_id].hist_passed, + turnOns[input_id].hist_total, + turnOns[-1].hist_passed, + turnOns[-1].hist_total) + if ratio_graph: + output_file.WriteTObject(ratio_graph, 'ratio_{}'.format(plain_title), 'Overwrite') + ratio_pad.cd() + ratio_color = colors[input_id] if n_inputs > 2 else ROOT.kBlack + RootPlotting.ApplyDefaultLineStyle(ratio_graph, ratio_color) + ratio_graph.Draw("0PE SAME") + main_pad.cd() + legend.Draw() + + canvas.Update() + output_file.WriteTObject(canvas, 'canvas_{}'.format(plain_title), 'Overwrite') + RootPlotting.PrintAndClear(canvas, args.output + '.pdf', plain_title, plot_id, n_plots, + [ main_pad, ratio_pad ]) + plot_id += 1 +output_file.Close() diff --git a/TauTagAndProbe/python/estimateBackgrounds.py b/TauTagAndProbe/python/estimateBackgrounds.py new file mode 100644 index 00000000000..fc2b0d5ef00 --- /dev/null +++ b/TauTagAndProbe/python/estimateBackgrounds.py @@ -0,0 +1,431 @@ +#!/usr/bin/env python + +import argparse +from array import array +import math +import numpy as np +import os +import re +import sys +import ROOT + +parser = argparse.ArgumentParser(description='Estimate backgrounds.') +parser.add_argument('--input', required=True, type=str, nargs='+', help="input files") +parser.add_argument('--output-data', required=True, type=str, help="output file prefix for data") +parser.add_argument('--output-dy-mc', required=True, type=str, help="output file prefix for ZTT MC") +parser.add_argument('--output-signal', required=False, type=str, default='signal', help="output file prefix for signal region") +parser.add_argument('--output-w-enriched', required=False, type=str, default='w_enriched', help="output file prefix for w-enriched region") +parser.add_argument('--mode', required=True, type=str, help="subtract backgrounds from data or add backgrounds to ZTT MC") +args = parser.parse_args() + +if not(args.mode == "subtract-from-data" or args.mode == "add-to-dy-mc"): + raise ValueError("Invalid configuration parameter mode = '%s' !!" % args.mode) + +path_prefix = '' if 'TauTriggerTools' in os.getcwd() else 'TauTriggerTools/' +sys.path.insert(0, path_prefix + 'Common/python') +from AnalysisTypes import * +from AnalysisTools import * +ROOT.ROOT.EnableImplicitMT(4) +ROOT.gROOT.SetBatch(True) +ROOT.gInterpreter.Declare('#include "{}TauTagAndProbe/interface/PyInterface.h"'.format(path_prefix)) + +input_vec = ListToStdVector(args.input) + +df_input = ROOT.RDataFrame('events', input_vec) +print "df_input = ", df_input +print("sum = %1.2f (%i)" % (df_input.Sum("weight").GetValue(), df_input.Count().GetValue())) +print("") + +df_output_data = None +df_output_dy_mc = None + +processes = [ "data", "ztt-mc", "zmm-mc", "w-mc", "ttbar-mc" ] + +# define SS->OS extrapolation factor for QCD multijet background +sf_qcd_SS_to_OS = 1. 
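+# (a value of 1.0 assumes the QCD multijet yield is the same in the same-sign and
+#  opposite-sign regions; replace with a measured extrapolation factor if one is available)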
+print("sf_qcd_SS_to_OS = %1.2f" % sf_qcd_SS_to_OS) +print("") + +#---------------------------------------------------------------------------------------------------- +# define integer constants +type_data = 0 +type_ztt_mc = 1 +type_zmm_mc = 2 +type_w_mc = 3 +type_ttbar_mc = 4 + +selection_OS_low_mT = 0 +selection_OS_high_mT = 1 +selection_SS_low_mT = 2 +selection_SS_high_mT = 3 +selection_signal = 4 +selection_w_enriched = 5 +#---------------------------------------------------------------------------------------------------- + +def get_type(process): + if process == "data": + return type_data + elif process == "ztt-mc": + return type_ztt_mc + elif process == "zmm-mc": + return type_zmm_mc + elif process == "w-mc": + return type_w_mc + elif process == "ttbar-mc": + return type_ttbar_mc + else: + raise ValueError("Invalid function argument: process = '%s' !!" % process) + +#---------------------------------------------------------------------------------------------------- +def makeBinContentsPositive(histogramName, histogram): + print(": histogram = %s" % histogramName) + integral_original = histogram.Integral() + for i in range(histogram.GetNbinsX()): + binContent = histogram.GetBinContent(i + 1) + binError = histogram.GetBinError(i + 1) + if binContent < 0.: + histogram.SetBinContent(i + 1, 0.) + histogram.SetBinError(i + 1, math.sqrt(binContent**2 + binError**2)) + integral_modified = histogram.Integral() + print("integral: original = %1.2f, modified = %1.2f" % (integral_original, integral_modified)) + if integral_modified > 0.: + histogram.Scale(integral_original/integral_modified) + +def makeControlPlot(histograms, var, useLogScale, outputFileName): + + print("") + + canvasSizeX = 800 + canvasSizeY = 900 + + canvas = ROOT.TCanvas("canvas", "", canvasSizeX, canvasSizeY) + canvas.SetFillColor(10) + canvas.SetFillStyle(4000) + canvas.SetFillColor(10) + canvas.SetTicky() + canvas.SetBorderSize(2) + canvas.SetLeftMargin(0.12) + canvas.SetBottomMargin(0.12) + + topPad = ROOT.TPad("topPad", "topPad", 0.00, 0.35, 1.00, 1.00) + topPad.SetFillColor(10) + topPad.SetTopMargin(0.055) + topPad.SetLeftMargin(0.155) + topPad.SetBottomMargin(0.030) + topPad.SetRightMargin(0.050) + topPad.SetLogy(useLogScale) + + bottomPad = ROOT.TPad("bottomPad", "bottomPad", 0.00, 0.00, 1.00, 0.35) + bottomPad.SetFillColor(10) + bottomPad.SetTopMargin(0.020) + bottomPad.SetLeftMargin(0.155) + bottomPad.SetBottomMargin(0.310) + bottomPad.SetRightMargin(0.050) + bottomPad.SetLogy(False) + + canvas.cd() + topPad.Draw() + topPad.cd() + + histogram_data = histograms['data'] + print("integral['data'] = %1.2f" % histogram_data.Integral()) + + xAxis_top = histogram_data.GetXaxis() + xAxis_top.SetTitle(var); + xAxis_top.SetTitleOffset(1.2); + xAxis_top.SetLabelColor(10); + xAxis_top.SetTitleColor(10); + + yAxis_top = histogram_data.GetYaxis() + yAxis_top.SetTitle("Events") + yAxis_top.SetTitleOffset(1.2) + yAxis_top.SetTitleSize(0.065) + yAxis_top.SetLabelSize(0.05) + yAxis_top.SetTickLength(0.04) + + legendTextSize = 0.040 + legendPosX = 0.740 + legendPosY = 0.510 + legendSizeX = 0.190 + legendSizeY = 0.420 + + legend = ROOT.TLegend(legendPosX, legendPosY, legendPosX + legendSizeX, legendPosY + legendSizeY, "", "brNDC") + legend.SetFillStyle(0) + legend.SetBorderSize(0) + legend.SetFillColor(10) + legend.SetTextSize(legendTextSize) + + histogram_sum = None + for histogramName, histogram in histograms.items(): + if histogramName != "data": + if not histogram_sum: + histogram_sum = histogram.Clone("histogram_sum") + else: + 
histogram_sum.Add(histogram.GetPtr()) + print("integral['sum'] = %1.2f" % histogram_sum.Integral()) + + yMin = None + yMax = None + if useLogScale: + yMin = 5.e-1 + yMax = 1.e+1*ROOT.TMath.Max(histogram_data.GetMaximum(), histogram_sum.GetMaximum()) + else: + yMin = 0. + yMax = 1.3*ROOT.TMath.Max(histogram_data.GetMaximum(), histogram_sum.GetMaximum()) + + histogram_data.SetTitle("") + histogram_data.SetStats(False) + histogram_data.SetMaximum(yMax) + histogram_data.SetMinimum(yMin) + histogram_data.SetMarkerStyle(20) + markerSize = None + if histogram_data.GetNbinsX() < 40: + markerSize = 2 + else: + markerSize = 1 + histogram_data.SetMarkerSize(markerSize) + histogram_data.SetMarkerColor(1) + histogram_data.SetLineColor(1) + legend.AddEntry(histogram_data.GetPtr(), "observed", "p"); + histogram_data.Draw("ep"); + + colors = {} + colors['ztt-mc'] = 796 + colors['zmm-mc'] = 842 + colors['w-mc'] = 634 + colors['ttbar-mc'] = 592 + colors['qcd'] = 606 + + legendEntries = {} + legendEntries['ztt-mc'] = "Z#rightarrow#tau#tau" + legendEntries['zmm-mc'] = "Z#rightarrow#mu#mu" + legendEntries['w-mc'] = "W+jets" + legendEntries['ttbar-mc'] = "t#bar{t}+jets" + legendEntries['qcd'] = "Multijet" + + histograms_stack = ROOT.THStack("stack", ""); + for histogramName in [ "qcd", "w-mc", "ttbar-mc", "zmm-mc", "ztt-mc" ]: + histogram = histograms[histogramName] + makeBinContentsPositive(histogramName, histogram) + print("integral['%s'] = %1.2f" % (histogramName, histogram.Integral())) + histogram.SetFillColor(colors[histogramName]) + histogram.SetLineColor(1) + histograms_stack.Add(histogram.GetPtr()) + for histogramName in reversed([ "qcd", "w-mc", "ttbar-mc", "zmm-mc", "ztt-mc" ]): + histogram = histograms[histogramName] + legend.AddEntry(histogram.GetPtr(), legendEntries[histogramName], "f") + histograms_stack.Draw("histsame") + + histogram_data.Draw("epsame") + histogram_data.Draw("axissame") + + legend.Draw() + + canvas.cd() + bottomPad.Draw() + bottomPad.cd() + + histogram_ratio = histogram_data.Clone("histogram_ratio") + histogram_ratio.Reset() + if not histogram_ratio.GetSumw2N(): + histogram_ratio.Sumw2() + histogram_ratio.Divide(histogram_data.GetPtr(), histogram_sum); + for i in range(histogram_ratio.GetNbinsX()): + binContent = histogram_ratio.GetBinContent(i + 1) + histogram_ratio.SetBinContent(i + 1, binContent - 1.0) + histogram_ratio.SetTitle("") + histogram_ratio.SetStats(False) + histogram_ratio.SetMinimum(-0.50) + histogram_ratio.SetMaximum(+0.50) + histogram_ratio.SetMarkerStyle(histogram_data.GetMarkerStyle()) + histogram_ratio.SetMarkerSize(histogram_data.GetMarkerSize()) + histogram_ratio.SetMarkerColor(histogram_data.GetMarkerColor()) + histogram_ratio.SetLineColor(histogram_data.GetLineColor()) + + xAxis_bottom = histogram_ratio.GetXaxis() + xAxis_bottom.SetTitle(xAxis_top.GetTitle()) + xAxis_bottom.SetLabelColor(1) + xAxis_bottom.SetTitleColor(1) + xAxis_bottom.SetTitleOffset(1.20) + xAxis_bottom.SetTitleSize(0.12) + xAxis_bottom.SetLabelOffset(0.02) + xAxis_bottom.SetLabelSize(0.10) + xAxis_bottom.SetTickLength(0.055) + + yAxis_bottom = histogram_ratio.GetYaxis() + yAxis_bottom.SetTitle("#frac{Data - Expectation}{Expectation}") + yAxis_bottom.SetTitleOffset(0.80) + yAxis_bottom.SetNdivisions(505) + yAxis_bottom.CenterTitle() + yAxis_bottom.SetTitleSize(0.09) + yAxis_bottom.SetLabelSize(0.10) + yAxis_bottom.SetTickLength(0.04) + + histogram_ratio.Draw("ep") + + line = ROOT.TF1("line","0", xAxis_bottom.GetXmin(), xAxis_bottom.GetXmax()) + line.SetLineStyle(3) + line.SetLineWidth(1) 
+ line.SetLineColor(1) + line.Draw("same") + + histogram_ratio.Draw("epsame") + + canvas.Update() + idx = outputFileName.rfind('.') + outputFileName_plot = outputFileName[0:idx] + if useLogScale: + outputFileName_plot += "_log" + else: + outputFileName_plot += "_linear" + canvas.Print(outputFileName_plot + ".png") + canvas.Print(outputFileName_plot + ".pdf") +#---------------------------------------------------------------------------------------------------- + +#-------------- MAKE ROOT FILES FOR SIGNAL AND W-ENRICHED SIDEBANDS (only in "subtract-from-data" mode) +if args.mode == "subtract-from-data": + df_signal_data = df_input.Filter("selection == {} && type == {}".format(selection_signal, type_data)) + df_signal_ztt_mc = df_input.Filter("selection == {} && type == {}".format(selection_signal, type_ztt_mc)) + df_signal_qcd = df_input.Filter("selection == {}".format(selection_SS_low_mT)) + df_w_enriched_data = df_input.Filter("selection == {} && type == {}".format(selection_w_enriched, type_data)) + print("Total weight: df_w_enriched_data = ", df_w_enriched_data.Sum("weight").GetValue()) + df_w_enriched_mc = df_input.Filter(''' + (selection == {} && type == {}) + || (selection == {} && type == {}) + || (selection == {} && type == {}) + '''.format(selection_w_enriched, type_ttbar_mc, + selection_w_enriched, type_w_mc, + selection_w_enriched, type_zmm_mc)) + print("Total weight: df_w_enriched_mc = ", df_w_enriched_mc.Sum("weight").GetValue()) + df_signal_data.Snapshot('events', args.output_signal + '_data.root') + df_signal_ztt_mc.Snapshot('events', args.output_signal + '_ztt_mc.root') + df_signal_qcd.Snapshot('events', args.output_signal + '_signal_qcd_inputs.root') + df_w_enriched_data.Snapshot('events', args.output_w_enriched + '_data.root') + df_w_enriched_mc.Snapshot('events', args.output_w_enriched + '_mc.root') +#---------------------------------------------------------------------------------------------------- + +# step 1: determine scale-factor for W+jets background in SS region +df_SS_high_mT = df_input.Filter("selection == %i" % selection_SS_high_mT) +sum_SS_high_mT = {} +for process in processes: + df_SS_high_mT_process = df_SS_high_mT.Filter("type == %i" % get_type(process)) + sum_SS_high_mT[process] = df_SS_high_mT_process.Sum("weight").GetValue() + print("sum_SS_high_mT['%s'] = %1.2f (%i)" % (process, sum_SS_high_mT[process], df_SS_high_mT_process.Count().GetValue())) +sf_w_mc_SS = (sum_SS_high_mT['data'] - (sum_SS_high_mT["ztt-mc"] + sum_SS_high_mT['zmm-mc'] + sum_SS_high_mT['ttbar-mc']))/sum_SS_high_mT['w-mc'] +print("sf_w_mc_SS = %1.2f" % sf_w_mc_SS) +print("") + +# step 2: determine QCD multijet background in SS region +# (Note: QCD multijet background in SS high mT sideband assumed to be negligible) +df_SS_low_mT = df_input.Filter("selection == %i" % selection_SS_low_mT) +sum_SS_low_mT = {} +sum_data_SS_low_mT = 0. +sum_mc_SS_low_mT = 0. 
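+# QCD yield in the SS low-mT region = data - (Ztt + Zmm + ttbar + W MC),
+# with the W+jets contribution rescaled by sf_w_mc_SS determined in step 1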
+for process in processes: + df_SS_low_mT_process = df_SS_low_mT.Filter("type == %i" % get_type(process)) + sum_SS_low_mT[process] = df_SS_low_mT_process.Sum("weight").GetValue() + if process == "w-mc": + sum_SS_low_mT[process] *= sf_w_mc_SS + print("sum_SS_low_mT['%s'] = %1.2f (%i)" % (process, sum_SS_low_mT[process], df_SS_low_mT_process.Count().GetValue())) + if process == "data": + sum_data_SS_low_mT += sum_SS_low_mT[process] + else: + sum_mc_SS_low_mT += sum_SS_low_mT[process] +sum_SS_low_mT['qcd'] = sum_data_SS_low_mT - sum_mc_SS_low_mT +print("sum_SS_low_mT['qcd'] = %1.2f" % sum_SS_low_mT['qcd']) +print("") + +# step 3: determine scale-factor for W+jets background in OS region +# (Note: QCD multijet background in OS high mT sideband assumed to be negligible) +df_OS_high_mT = df_input.Filter("selection == %i" % selection_OS_high_mT) +sum_OS_high_mT = {} +for process in processes: + df_OS_high_mT_process = df_OS_high_mT.Filter("type == %i" % get_type(process)) + sum_OS_high_mT[process] = df_OS_high_mT_process.Sum("weight").GetValue() + print("sum_OS_high_mT['%s'] = %1.2f (%i)" % (process, sum_OS_high_mT[process], df_OS_high_mT_process.Count().GetValue())) +sf_w_mc_OS = (sum_OS_high_mT['data'] - (sum_OS_high_mT["ztt-mc"] + sum_OS_high_mT['zmm-mc'] + sum_OS_high_mT['ttbar-mc']))/sum_OS_high_mT['w-mc'] +print("sf_w_mc_OS = %1.2f" % sf_w_mc_OS) +print("") + +# step 4: print event yields in "signal" region for input RDataFrame objects +df_OS_low_mT = df_input.Filter("selection == %i" % selection_OS_low_mT) +sum_OS_low_mT = {} +sum_data_OS_low_mT = 0. +sum_mc_OS_low_mT = 0. +for process in processes: + df_OS_low_mT_process = df_OS_low_mT.Filter("type == %i" % get_type(process)) + sum_OS_low_mT[process] = df_OS_low_mT_process.Sum("weight").GetValue() + if process == "w-mc": + sum_OS_low_mT[process] *= sf_w_mc_OS + print("sum_OS_low_mT['%s'] = %1.2f (%i)" % (process, sum_OS_low_mT[process], df_OS_low_mT_process.Count().GetValue())) + if process == "data": + sum_data_OS_low_mT += sum_OS_low_mT[process] + else: + sum_mc_OS_low_mT += sum_OS_low_mT[process] +print("sum_OS_low_mT['qcd'] = %1.2f" % (sum_SS_low_mT['qcd']*sf_qcd_SS_to_OS)) +print("") + +# step 5: build RDataFrame object for 'data' +final_weight_data = ROOT.final_weight_data.Initialize(sf_qcd_SS_to_OS, sf_w_mc_OS, sf_w_mc_SS) +if args.mode == "subtract-from-data": + df_output_data = df_input.Filter("(selection == %i && type == %i) || (selection == %i) || (selection == %i && (type == %i || type == %i || type == %i))" % (selection_OS_low_mT, type_data, selection_SS_low_mT, selection_OS_low_mT, type_zmm_mc, type_w_mc, type_ttbar_mc)) + df_output_data = df_output_data.Define("final_weight", "final_weight_data::GetDefault().operator()(selection, type, weight)") +elif args.mode == "add-to-dy-mc": + df_output_data = df_input.Filter("selection == %i && type == %i" % (selection_OS_low_mT, type_data)) + +# step 6: build RDataFrame object for 'dy-mc' +final_weight_dy_mc = ROOT.final_weight_dy_mc.Initialize(sf_qcd_SS_to_OS, sf_w_mc_OS, sf_w_mc_SS) +if args.mode == "subtract-from-data": + df_output_dy_mc = df_input.Filter("selection == %i && type == %i" % (selection_OS_low_mT, type_ztt_mc)) +elif args.mode == "add-to-dy-mc": + df_output_dy_mc = df_input.Filter("(selection == %i && type == %i) || (selection == %i) || (selection == %i && (type == %i || type == %i || type == %i))" % (selection_OS_low_mT, type_ztt_mc, selection_SS_low_mT, selection_OS_low_mT, type_zmm_mc, type_w_mc, type_ttbar_mc)) + df_output_dy_mc = 
df_output_dy_mc.Define("final_weight", "final_weight_dy_mc::GetDefault().operator()(selection, type, weight)") + +# step 7: print data and dy-mc event yields in "signal" region for output RDataFrame objects +print "df_output_data = ", df_output_data +weight_data = None +if args.mode == "subtract-from-data": + weight_data = "final_weight" +else: + weight_data = "weight" +sum_OS_low_mT_data = df_output_data.Sum(weight_data).GetValue() +print("sum_OS_low_mT_data = %1.2f (%i)" % (sum_OS_low_mT_data, df_output_data.Count().GetValue())) +print "df_output_dy_mc = ", df_output_dy_mc +weight_dy_mc = None +if args.mode == "add-to-dy-mc": + weight_dy_mc = "final_weight" +else: + weight_dy_mc = "weight" +sum_OS_low_mT_dy_mc = df_output_dy_mc.Sum(weight_dy_mc).GetValue() +print("sum_OS_low_mT_dy_mc = %1.2f (%i)" % (sum_OS_low_mT_dy_mc, df_output_dy_mc.Count().GetValue())) +print("") + +# step 8: make control plots (only implemented for "add-to-dy-mc" mode so far) +var = "tau_pt" +branchname_weight_data = "weight" +branchname_weight_dy_mc = "final_weight" +hist_model = ROOT.RDF.TH1DModel(var, var, 18, 20., 200.) +histograms = {} +if args.mode == "add-to-dy-mc": + discr_name = "byDeepTau2017v2p1VSjet" + for wp in [ "VVVLoose", "VVLoose", "VLoose", "Loose", "Medium", "Tight", "VTight", "VVTight" ]: + wp_bit = ParseEnum(DiscriminatorWP, wp) + offlineTauSel = "tau_pt > 20 && abs(tau_eta) < 2.3" + offlineTauSel += " && (tau_decayMode == 0 || tau_decayMode == 1 || tau_decayMode == 2 || tau_decayMode == 10 || tau_decayMode == 11)" + offlineTauSel += " && (%s & (1 << %i))" % (discr_name, wp_bit) + df_data_passing_offlineTauSel = df_output_data.Filter(offlineTauSel) + histograms['data'] = df_data_passing_offlineTauSel.Histo1D(hist_model, var, branchname_weight_data) + df_dy_mc_passing_offlineTauSel = df_output_dy_mc.Filter(offlineTauSel) + histograms['ztt-mc'] = df_dy_mc_passing_offlineTauSel.Filter("selection == %i && type == %i" % (selection_OS_low_mT, type_ztt_mc)).Histo1D(hist_model, var, branchname_weight_dy_mc) + histograms['zmm-mc'] = df_dy_mc_passing_offlineTauSel.Filter("selection == %i && type == %i" % (selection_OS_low_mT, type_zmm_mc)).Histo1D(hist_model, var, branchname_weight_dy_mc) + histograms['w-mc'] = df_dy_mc_passing_offlineTauSel.Filter("selection == %i && type == %i" % (selection_OS_low_mT, type_w_mc)).Histo1D(hist_model, var, branchname_weight_dy_mc) + histograms['ttbar-mc'] = df_dy_mc_passing_offlineTauSel.Filter("selection == %i && type == %i" % (selection_OS_low_mT, type_ttbar_mc)).Histo1D(hist_model, var, branchname_weight_dy_mc) + histograms['qcd'] = df_dy_mc_passing_offlineTauSel.Filter("selection == %i" % selection_SS_low_mT).Histo1D(hist_model, var, branchname_weight_dy_mc) + #outputFileName = "estimateBackgrounds_%s%s_%s.pdf" % (discr_name, wp, var) + outputFileName = "%s_%s%s_%s.pdf" % (args.output_dy_mc, discr_name, wp, var) + makeControlPlot(histograms, var, True, outputFileName) + print("") + +# step 9: write RDataFrame objects to output files +df_output_data.Snapshot('events', args.output_data + '.root') +df_output_dy_mc.Snapshot('events', args.output_dy_mc + '.root') diff --git a/TauTagAndProbe/python/fitTurnOn.py b/TauTagAndProbe/python/fitTurnOn.py new file mode 100644 index 00000000000..aa2275a98b2 --- /dev/null +++ b/TauTagAndProbe/python/fitTurnOn.py @@ -0,0 +1,267 @@ +import argparse +import os +import sys +import math +import numpy as np +import matplotlib +matplotlib.use('Agg') +from matplotlib import pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages 
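+# scipy and scikit-learn are used below for the Gaussian-process fit of the turn-on curves
+# (see the FitResults class); the 'Agg' backend above allows the plots to be produced in batch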
+import scipy +import copy +from scipy import interpolate + +from sklearn.gaussian_process import GaussianProcessRegressor +from sklearn.gaussian_process.kernels import Matern, ConstantKernel + +import ROOT +ROOT.gROOT.SetBatch(True) +ROOT.TH1.SetDefaultSumw2() + +path_prefix = '' if 'TauTriggerTools' in os.getcwd() else 'TauTriggerTools/' +sys.path.insert(0, path_prefix + 'Common/python') +from RootObjects import Histogram, Graph + +parser = argparse.ArgumentParser(description='Fit turn-on curves.') +parser.add_argument('--input', required=True, type=str, help="ROOT file with turn-on curves") +parser.add_argument('--output', required=True, type=str, help="output file prefix") +parser.add_argument('--channels', required=False, type=str, default='etau,mutau,ditau', help="channels to process") +parser.add_argument('--decay-modes', required=False, type=str, default='all,0,1,10,11', help="decay modes to process") +parser.add_argument('--mode', required=True, type=str, default='adaptive', help="Choose from one of these 2 bin schemes: 'fixed' or 'adaptive'") +parser.add_argument('--working-points', required=False, type=str, + default='VVVLoose,VVLoose,VLoose,Loose,Medium,Tight,VTight,VVTight', + help="working points to process") +args = parser.parse_args() + +if((args.mode != "adaptive") and (args.mode != "fixed")): + raise ValueError("Invalid configuration parameter = '%s'!!" % (args.mode)) + +def MinTarget(dy, eff): + y = np.cumsum(dy) + return np.sum(((eff.y - y) / (eff.y_error_high + eff.y_error_low)) ** 2) + +class FitResults: + def __init__(self, eff, x_pred): + kernel_high = ConstantKernel() + kernel_low = ConstantKernel() * Matern(nu=1, length_scale_bounds=(5, 50), length_scale=10) + N = eff.x.shape[0] + res = scipy.optimize.minimize(MinTarget, np.zeros(N), args=(eff,), bounds = [ [0, 1] ] * N, + options={"maxfun": int(1e6)}) + if not res.success: + print(res) + raise RuntimeError("Unable to prefit") + + eff = copy.deepcopy(eff) + new_y = np.cumsum(res.x) + delta = eff.y - new_y + eff.y_error_low = np.sqrt(eff.y_error_low ** 2 + delta ** 2) + eff.y_error_high = np.sqrt(eff.y_error_high ** 2 + delta ** 2) + eff.y = new_y + yerr = np.maximum(eff.y_error_low, eff.y_error_high) + + self.pt_start_flat = eff.x[-1] + best_chi2_ndof = float('inf') + for n in range(1, N): + flat_eff, residuals, _, _, _ = np.polyfit(eff.x[N-n-1:], eff.y[N-n-1:], 0, w=1/yerr[N-n-1:], full=True) + chi2_ndof = residuals[0] / n + #print(n, chi2_ndof) + if (chi2_ndof > 0 and chi2_ndof < best_chi2_ndof) or eff.x[N-n-1] + eff.x_error_high[N-n-1] >= 80: + self.pt_start_flat = eff.x[N-n-1] + best_chi2_ndof = chi2_ndof + #print("pt_start_flat = %1.2f: best_chi2_ndof = %1.2f" % (self.pt_start_flat, best_chi2_ndof)) + if best_chi2_ndof > 20: + print("Warning: Unable to determine the plateau region") + self.pt_start_flat = eff.x[-1] + + low_pt = eff.x <= self.pt_start_flat + high_pt = eff.x >= self.pt_start_flat + + self.gp_high = GaussianProcessRegressor(kernel=kernel_high, alpha=yerr[high_pt] ** 2, n_restarts_optimizer=10) + self.gp_high.fit(np.atleast_2d(eff.x[high_pt]).T, eff.y[high_pt]) + self.gp_low = GaussianProcessRegressor(kernel=kernel_low, alpha=np.append([0], yerr[low_pt] ** 2), + n_restarts_optimizer=10) + self.gp_low.fit(np.atleast_2d(np.append([10], eff.x[low_pt])).T, np.append([0], eff.y[low_pt])) + + self.y_pred, sigma_pred = self.Predict(x_pred) + + sigma_orig = np.zeros(N) + for n in range(N): + idx = np.argmin(abs(x_pred - eff.x[n])) + sigma_orig[n] = sigma_pred[idx] + + interp_kind = 'linear' + sp = 
interpolate.interp1d(eff.x, sigma_orig, kind=interp_kind, fill_value="extrapolate") + sigma_interp = sp(x_pred) + max_unc = 0.05 / math.sqrt(2) + sigma_pred, = self.ApplyStep(x_pred, [ [ sigma_pred, sigma_interp ] ], eff.x[0], eff.x[-1] ) + outer_trend = np.minimum(np.ones(x_pred.shape[0]), (x_pred - eff.x[-1]) / eff.x[-1]) + outer_sigma = np.maximum(sigma_pred, sigma_pred + (max_unc - sigma_pred) * outer_trend ) + self.sigma_pred = np.where(x_pred < eff.x[-1], sigma_pred, outer_sigma ) + + def Predict(self, x_pred): + y_pred_high, sigma_high = self.gp_high.predict(np.atleast_2d(x_pred).T, return_std=True) + y_pred_low, sigma_low = self.gp_low.predict(np.atleast_2d(x_pred).T, return_std=True) + return self.ApplyStep(x_pred, [ [y_pred_low, y_pred_high], [sigma_low, sigma_high] ], self.pt_start_flat) + + def ApplyStep(self, x_pred, functions, x0, x1 = None): + step = (np.tanh(0.1*(x_pred - x0)) + 1) / 2 + if x1 is not None: + step *= (np.tanh(0.1*(x1 - x_pred)) + 1) / 2 + step = np.where(step > 0.999, 1, step) + step = np.where(step < 0.001, 0, step) + results = [] + for fn in functions: + results.append(fn[0] * (1-step) + fn[1] * step) + return tuple(results) + +mode = args.mode +channels = args.channels.split(',') +decay_modes = args.decay_modes.split(',') +working_points = args.working_points.split(',') +ch_validity_thrs = { 'etau': 35, 'mutau': 32, 'ditau': 40 } + +input_file = ROOT.TFile(args.input, 'READ') +output_file = ROOT.TFile('{}.root'.format(args.output), 'RECREATE', '', ROOT.RCompressionSetting.EDefaults.kUseSmallest) + +for channel in channels: + with PdfPages('{}_{}.pdf'.format(args.output, channel)) as pdf: + for wp in working_points: + for dm in decay_modes: + print('Processing {} {} WP DM = {}'.format(channel, wp, dm)) + dm_label = '_dm{}'.format(dm) if dm != 'all' else '' + name_pattern = '{{}}_{}_{}{}_fit_eff'.format(channel, wp, dm_label) + dm_label = '_dm'+ dm if len(dm) > 0 else '' + #print("name_pattern.format(data)", name_pattern.format('data')) + eff_data_root = input_file.Get(name_pattern.format('data')) + #print("name_pattern.format(data)", name_pattern.format('data')) + eff_mc_root = input_file.Get(name_pattern.format('mc')) + eff_data = Graph(root_graph=eff_data_root) + print("type(eff_data): ", type(eff_data)) + eff_mc = Graph(root_graph=eff_mc_root) + print("type(eff_mc): ", type(eff_mc)) + pred_step = 0.1 + + if(mode == "adaptive"): ## ----Christian's adaptive binning scheme ------ + print("Using Adaptive binning") + x_low = min(eff_data.x[0] - eff_data.x_error_low[0], eff_mc.x[0] - eff_mc.x_error_low[0]) + x_high = max(eff_data.x[-1] + eff_data.x_error_high[-1], eff_mc.x[-1] + eff_mc.x_error_high[-1]) + x_array = [] + x_array.append(x_low) + x_array.extend([ x_data for x_data in eff_data.x ]) + x_array.append(x_high) + x_pred = np.array(x_array) + elif(mode == "fixed"): ## ----- Konstantin's fixed binning scheme ------ + print("Using Fixed binning: {}".format(pred_step)) + x_low, x_high = 20, 1000 ## DEF LINE + #x_low, x_high = 20, 200 ## MY LINE + x_pred = np.arange(x_low, x_high + pred_step / 2, pred_step) + + print("x_low = %1.2f, x_high = %1.2f" % (x_low, x_high)) + print("x_pred = ", x_pred) + + eff_data_fitted = FitResults(eff_data, x_pred) + ##print("eff_data_fitted = ", eff_data_fitted.y_pred) + eff_mc_fitted = FitResults(eff_mc, x_pred) + ##print("eff_mc_fitted = ", eff_mc_fitted.y_pred) + + sf = eff_data_fitted.y_pred / eff_mc_fitted.y_pred + ##print("sf = ", sf) + sf_sigma = np.sqrt( (eff_data_fitted.sigma_pred / eff_mc_fitted.y_pred) ** 2 \ + + 
(eff_data_fitted.y_pred / (eff_mc_fitted.y_pred ** 2) * eff_mc_fitted.sigma_pred ) ** 2 ) + ##print("sf_sigma = ", sf_sigma) + + fig, (ax, ax_ratio) = plt.subplots(2, 1, figsize=(7, 7), sharex=True, + gridspec_kw = {'height_ratios':[2, 1]}) + mc_color = 'g' + data_color = 'k' + trans = 0.3 + + plt_data = ax.errorbar(eff_data.x, eff_data.y, xerr=(eff_data.x_error_low, eff_data.x_error_high), + yerr=(eff_data.y_error_low, eff_data.y_error_high), fmt=data_color+'.', + markersize=5) + plt_mc = ax.errorbar(eff_mc.x, eff_mc.y, xerr=(eff_mc.x_error_low, eff_mc.x_error_high), + yerr=(eff_mc.y_error_low, eff_mc.y_error_high), fmt=mc_color+'.', markersize=5) + + plt_data_fitted = ax.plot(x_pred, eff_data_fitted.y_pred, data_color+'--') + ax.fill(np.concatenate([x_pred, x_pred[::-1]]), + np.concatenate([eff_data_fitted.y_pred - eff_data_fitted.sigma_pred, + (eff_data_fitted.y_pred + eff_data_fitted.sigma_pred)[::-1]]), + alpha=trans, fc=data_color, ec='None') + + plt_mc_fitted = ax.plot(x_pred, eff_mc_fitted.y_pred, mc_color+'--') + ax.fill(np.concatenate([x_pred, x_pred[::-1]]), + np.concatenate([eff_mc_fitted.y_pred - eff_mc_fitted.sigma_pred, + (eff_mc_fitted.y_pred + eff_mc_fitted.sigma_pred)[::-1]]), + alpha=trans, fc=mc_color, ec='None') + + ax.plot( ax.get_xlim(), [ 1.0, 1.0 ], 'r--' ) + ax_ratio.plot( ax.get_xlim(), [ 1.0, 1.0 ], 'r--' ) + + ax_ratio.plot(x_pred, sf, 'b--') + ax_ratio.fill(np.concatenate([x_pred, x_pred[::-1]]), + np.concatenate([sf - sf_sigma, (sf + sf_sigma)[::-1]]), + alpha=trans, fc='b', ec='None') + + title = "Turn-ons for {} trigger with {} DeepTau VSjet".format(channel, wp) + if dm != 'all': + title += " for DM={}".format(dm) + else: + title += " for all DMs" + ax.set_title(title, fontsize=16) + ax.set_ylabel("Efficiency", fontsize=12) + ax.set_ylim([ 0., 1.1 ]) + ax.set_xlim([ 20, min(200, plt.xlim()[1]) ]) + + ax_ratio.set_xlabel("$p_T$ (GeV)", fontsize=12) + ax_ratio.set_ylabel("Data/MC SF", fontsize=12) + ax_ratio.set_ylim([0.5, 1.49]) + + validity_plt = ax.plot( [ ch_validity_thrs[channel] ] * 2, ax.get_ylim(), 'r--' ) + ax_ratio.plot( [ ch_validity_thrs[channel] ] * 2, ax_ratio.get_ylim(), 'r--' ) + + if(mode == "adaptive"): ## ----Christian's fix for equal eff. in data and mc ------ + print("Applying the fix for the case when eff. lists in data and mc have same lengths") + if len(eff_data.y) == len(eff_mc.y): + ax_ratio.errorbar(eff_data.x, [ eff_data.y[i]/eff_mc.y[i] for i in range(len(eff_data.y)) ], xerr=(eff_data.x_error_low, eff_data.x_error_high), + yerr=( [ math.sqrt((eff_data.y_error_low[i]/eff_data.y[i])**2 + (eff_mc.y_error_high[i]/eff_mc.y[i])**2) for i in range(len(eff_data.y)) ], + [ math.sqrt((eff_data.y_error_high[i]/eff_data.y[i])**2 + (eff_mc.y_error_low[i]/eff_mc.y[i])**2) for i in range(len(eff_data.y)) ] ), + fmt=data_color+'.', markersize=5) + elif(mode == "fixed"): + print("Not applying the fix for the case when eff. 
lists in data and mc have same lengths") + + ax.legend([ plt_data, plt_mc, plt_data_fitted[0], plt_mc_fitted[0], validity_plt[0] ], + [ "Data", "MC", "Data fitted", "MC fitted", "Validity range"], fontsize=12, loc='lower right') + + plt.subplots_adjust(hspace=0) + pdf.savefig(bbox_inches='tight') + plt.close() + + out_name_pattern = '{{}}_{}_{}{}_{{}}'.format(channel, wp, dm_label) + output_file.WriteTObject(eff_data_root, out_name_pattern.format('data', 'eff'), 'Overwrite') + output_file.WriteTObject(eff_mc_root, out_name_pattern.format('mc', 'eff'), 'Overwrite') + if(mode == "adaptive"): ## ----Christian's adaptive binning scheme ----- + bin_edges = [] + for i in range(len(eff_data.x)): + if i == 0: + bin_edges.append(eff_data.x[i] - eff_data.x_error_low[i]) + bin_edges.append(eff_data.x[i] + eff_data.x_error_high[i]) + eff_data_fitted_hist = Histogram.CreateTH1(eff_data_fitted.y_pred, bin_edges, + eff_data_fitted.sigma_pred, fixed_step=False) + eff_mc_fitted_hist = Histogram.CreateTH1(eff_mc_fitted.y_pred, bin_edges, + eff_mc_fitted.sigma_pred, fixed_step=False) + elif(mode == "fixed"): ##----Konstantin's fixed binning case ------------------------ + bin_edges = [x_low, x_high] + print("bin_edges ", bin_edges) + eff_data_fitted_hist = Histogram.CreateTH1(eff_data_fitted.y_pred, bin_edges, + eff_data_fitted.sigma_pred, fixed_step=True) + eff_mc_fitted_hist = Histogram.CreateTH1(eff_mc_fitted.y_pred, bin_edges, + eff_mc_fitted.sigma_pred, fixed_step=True) + + sf_fitted_hist = eff_data_fitted_hist.Clone() + sf_fitted_hist.Divide(eff_mc_fitted_hist) + output_file.WriteTObject(eff_data_fitted_hist, out_name_pattern.format('data', 'fitted'), 'Overwrite') + output_file.WriteTObject(eff_mc_fitted_hist, out_name_pattern.format('mc', 'fitted'), 'Overwrite') + output_file.WriteTObject(sf_fitted_hist, out_name_pattern.format('sf', 'fitted'), 'Overwrite') + +output_file.Close() +print('All done.') diff --git a/TauTagAndProbe/python/fitTurnOnBinned.py b/TauTagAndProbe/python/fitTurnOnBinned.py new file mode 100644 index 00000000000..09a65456b7a --- /dev/null +++ b/TauTagAndProbe/python/fitTurnOnBinned.py @@ -0,0 +1,286 @@ +import argparse +import os +import sys +import math +import numpy as np +import matplotlib +matplotlib.use('Agg') +from matplotlib import pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +import scipy +from scipy import interpolate + +from sklearn.gaussian_process import GaussianProcessRegressor +from sklearn.gaussian_process.kernels import Kernel, KernelOperator, Matern, ConstantKernel, RBF, RationalQuadratic, \ + DotProduct, Exponentiation, Hyperparameter + +import ROOT +ROOT.gROOT.SetBatch(True) +ROOT.TH1.SetDefaultSumw2() + +path_prefix = '' if 'TauTriggerTools' in os.getcwd() else 'TauTriggerTools/' +sys.path.insert(0, path_prefix + 'Common/python') +from RootObjects import Histogram, Graph + +parser = argparse.ArgumentParser(description='Fit turn-on curves.') +parser.add_argument('--input', required=True, type=str, help="ROOT file with turn-on curves") +parser.add_argument('--output', required=True, type=str, help="output file prefix") +parser.add_argument('--channels', required=False, type=str, default='etau,mutau,ditau', help="channels to process") +parser.add_argument('--decay-modes', required=False, type=str, default='all,0,1,10,11', help="decay modes to process") +parser.add_argument('--working-points', required=False, type=str, + default='VVVLoose,VVLoose,VLoose,Loose,Medium,Tight,VTight,VVTight', + help="working points to process") +args = 
parser.parse_args() + + +class BinnedRBF(Kernel): + def __init__(self, bins, y_err, l=1.0, l_bounds=(1e-10, 1e10)): + self.bins = bins + self.y_err = y_err + self.l = l + self.l_bounds = l_bounds + + def is_stationary(self): + return False + + @property + def hyperparameter_l(self): + return Hyperparameter("l", "numeric", self.l_bounds) + + @staticmethod + def _UtilityFn1(z): + return z * math.sqrt(math.pi) * scipy.special.erf(z) + np.exp(-(z ** 2)) + + @staticmethod + def _UtilityFn2(z): + return z * math.sqrt(math.pi) / 2 * scipy.special.erf(z) + np.exp(-(z ** 2)) + + @staticmethod + def _UtilityFn3(z): + return z * math.sqrt(math.pi) / 2 * scipy.special.erf(z) - z * np.exp(-(z ** 2)) + + def FindXYerror(self, a): + a = np.minimum(self.bins[-1] - 1e-5, a) + a = np.maximum(self.bins[0], a) + idx = np.digitize(a, self.bins) + return (self.bins[idx] - self.bins[idx-1]) / math.sqrt(12), self.y_err[idx - 1], idx + #return (a - self.bins[0]) / 2 + 1 + #return np.exp((a - self.bins[0]) / 100) + + def FindMutualXYerror(self, a, b): + a_x_err, a_y_err, a_idx = self.FindXYerror(a) + b_x_err, b_y_err, b_idx = self.FindXYerror(b) + L = self.l * (a_x_err[:, np.newaxis] + b_x_err[np.newaxis, :]) / 2 + err_2d = np.tile(a_y_err, (b.shape[0], 1)).T + noise = np.where(a_idx[:, np.newaxis] == b_idx[np.newaxis, :], err_2d ** 2, 0) + return L, noise + + def NormDelta(self, a, b, L): + return (a[:, np.newaxis] - b[np.newaxis, :]) / L + + def k_ff(self, X, Y): + u = X[:, 0] + up = Y[:, 0] + L, noise = self.FindMutualXYerror(u, up) + return np.exp(- self.NormDelta(u, up, L) ** 2 ) + noise + + def k_FF(self, X, Y): + s = X[:, 0] + t = X[:, 1] + sp = Y[:, 0] + tp = Y[:, 1] + fn = BinnedRBF._UtilityFn1 + L, noise = self.FindMutualXYerror(s, sp) + d = [ self.NormDelta(t, sp, L), self.NormDelta(s, tp, L), + self.NormDelta(t, tp, L), self.NormDelta(s, sp, L) ] + sum = fn(d[0]) + fn(d[1]) - fn(d[2]) - fn(d[3]) + norm = (t - s)[:, np.newaxis] * (tp - sp)[np.newaxis, :] + return ((L ** 2) / 2 * sum + noise) / norm + + def k_Ff(self, X, Y): + s = X[:, 0] + t = X[:, 1] + tp = Y[:, 0] + fn = scipy.special.erf + L, noise = self.FindMutualXYerror(s, tp) + d = [ self.NormDelta(t, tp, L), self.NormDelta(s, tp, L) ] + sum = fn(d[0]) - fn(d[1]) + #norm = np.tile(t - s, (tp.shape[0], 1)).T + norm = (t - s)[:, np.newaxis] + print(noise.shape, norm.shape, L.shape) + return (L * math.sqrt(math.pi) / 2 * sum + noise) / norm + + def k_ff_grad_l(self, X, Y): + u = X[:, 0] + up = Y[:, 0] + L, noise = self.FindMutualXYerror(u, up) + delta2 = self.NormDelta(u, up, L) ** 2 + return 2 * delta2 / L * np.exp(-delta2) + + def k_FF_grad_l(self, X, Y): + s = X[:, 0] + t = X[:, 1] + sp = Y[:, 0] + tp = Y[:, 1] + fn = BinnedRBF._UtilityFn2 + L, noise = self.FindMutualXYerror(s, sp) + d = [ self.NormDelta(t, sp, L), self.NormDelta(s, tp, L), + self.NormDelta(t, tp, L), self.NormDelta(s, sp, L) ] + sum = fn(d[0]) + fn(d[1]) - fn(d[2]) - fn(d[3]) + norm = (t - s)[:, np.newaxis] * (tp - sp)[np.newaxis, :] + return L * sum / norm + + def k_Ff_grad_l(self, X, Y): + s = X[:, 0] + t = X[:, 1] + tp = Y[:, 0] + fn = BinnedRBF._UtilityFn3 + L, noise = self.FindMutualXYerror(s, tp) + sum = fn(self.NormDelta(t, tp, L)) + fn(-self.NormDelta(s, tp, L)) + norm = (t - s)[:, np.newaxis] + return sum / norm + + def __call__(self, X, Y=None, eval_gradient=False): + if Y is None: + x = X + y = X + else: + x = X + y = Y + ndim_x = x.shape[1] + ndim_y = y.shape[1] + + transpose = False + if ndim_x == 1 and ndim_y == 1: + K_fn = self.k_ff + K_grad_fn = self.k_ff_grad_l + 
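# Illustrative sketch (not part of the patch): if _UtilityFn1 is read as
# F(z) = sqrt(pi)*z*erf(z) + exp(-z^2), then k_FF above is the closed form of
# the RBF kernel exp(-((u-v)/L)^2) averaged over two bins [s,t] and [sp,tp]
# (ignoring the per-bin noise term from FindMutualXYerror), which is what lets
# the kernel act directly on binned efficiencies. A numerical cross-check of
# that identity for a fixed L:
import math
import numpy as np
from scipy import integrate
from scipy.special import erf

def F(z):
    return math.sqrt(math.pi) * z * erf(z) + np.exp(-z ** 2)

def k_FF_closed(s, t, sp, tp, L):
    total = F((t - sp) / L) + F((s - tp) / L) - F((t - tp) / L) - F((s - sp) / L)
    return (L ** 2) / 2 * total / ((t - s) * (tp - sp))

def k_FF_numeric(s, t, sp, tp, L):
    # Brute-force bin average of the RBF kernel for comparison.
    val, _ = integrate.dblquad(lambda v, u: math.exp(-((u - v) / L) ** 2), s, t, sp, tp)
    return val / ((t - s) * (tp - sp))

print(k_FF_closed(20., 25., 30., 40., 5.), k_FF_numeric(20., 25., 30., 40., 5.))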
elif ndim_x == 2 and ndim_y == 2: + K_fn = self.k_FF + K_grad_fn = self.k_FF_grad_l + elif (ndim_x == 2 and ndim_y == 1) or (ndim_x == 1 and ndim_y == 2): + if ndim_x == 1 and ndim_y == 2: + x, y = y, x + transpose = True + K_fn = self.k_Ff + K_grad_fn = self.k_Ff_grad_l + else: + raise RuntimeError("BinnedRBF: mode ndim_x = {} and ndim_y = {} is not supported.".format(ndim_x, ndim_y)) + + K = K_fn(x, y) + if transpose: + K = K.T + if eval_gradient: + if self.hyperparameter_l.fixed: + K_grad = np.empty((K.shape[0], K.shape[1], 0)) + else: + K_grad = K_grad_fn(x, y) + if transpose: + K_grad = K_grad.T + K_grad = K_grad[:, :, np.newaxis] + return K, K_grad + return K + + def diag(self, X): + return np.copy(np.diag(self(X, X))) + +class FitResults: + def __init__(self, eff, x_pred, pred_bin_width): + N = eff.x.shape[0] + bins = np.zeros((N, 2)) + bins[:, 0] = eff.x - eff.x_error_low + bins[:, 1] = eff.x + eff.x_error_high + bins_1d = np.append(bins[:, 0], [ bins[-1, 1] ]) + + bin_width = bins[:, 1] - bins[:, 0] + yerr = np.maximum(eff.y_error_low, eff.y_error_high) + + #kernel = ConstantKernel() * BinnedRBF(bins_1d, yerr) + kernel = BinnedRBF(bins_1d, yerr) + self.gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=10) + self.gp.fit(bins, eff.y) + + pred_bins = np.zeros((len(x_pred), 2)) + pred_bins[:, 0] = x_pred - pred_bin_width / 2 + pred_bins[:, 1] = x_pred + pred_bin_width / 2 + self.y_pred, self.sigma_pred = self.gp.predict(np.atleast_2d(x_pred).T, return_std=True) + #self.y_pred, self.sigma_pred = self.gp.predict(pred_bins, return_std=True) + #self.y_pred *= pred_bin_width + #self.sigma_pred *= pred_bin_width + +channels = args.channels.split(',') +decay_modes = args.decay_modes.split(',') +working_points = args.working_points.split(',') + +file = ROOT.TFile(args.input, 'READ') +output_file = ROOT.TFile('{}.root'.format(args.output), 'RECREATE', '', ROOT.RCompressionSetting.EDefaults.kUseSmallest) + +for channel in channels: + print('Processing {}'.format(channel)) + with PdfPages('{}_{}.pdf'.format(args.output, channel)) as pdf: + for wp in working_points: + for dm in decay_modes: + dm_label = '_dm{}'.format(dm) if dm != 'all' else '' + name_pattern = '{{}}_{}_{}{}_fit_eff'.format(channel, wp, dm_label) + dm_label = '_dm'+ dm if len(dm) > 0 else '' + eff_data_root = file.Get(name_pattern.format('data')) + eff_mc_root = file.Get(name_pattern.format('mc')) + eff_data = Graph(root_graph=eff_data_root) + eff_mc = Graph(root_graph=eff_mc_root) + pred_step = 0.1 + #x_low = min(eff_data.x[0] - eff_data.x_error_low[0], eff_mc.x[0] - eff_mc.x_error_low[0]) + #x_high = max(eff_data.x[-1] + eff_data.x_error_high[-1], eff_mc.x[-1] + eff_mc.x_error_high[-1]) + x_low, x_high = 20, 1000 + x_pred = np.arange(x_low, x_high + pred_step / 2, pred_step) + + eff_data_fitted = FitResults(eff_data, x_pred, pred_step) + eff_mc_fitted = FitResults(eff_mc, x_pred, pred_step) + + fig, (ax, ax_ratio) = plt.subplots(2, 1, figsize=(7, 7), sharex=True, + gridspec_kw = {'height_ratios':[2, 1]}) + mc_color = 'g' + data_color = 'k' + trans = 0.3 + + ax.errorbar(eff_mc.x, eff_mc.y, xerr=(eff_mc.x_error_low, eff_mc.x_error_high), + yerr=(eff_mc.y_error_low, eff_mc.y_error_high), fmt=mc_color+'.', markersize=5) + ax.errorbar(eff_data.x, eff_data.y, xerr=(eff_data.x_error_low, eff_data.x_error_high), + yerr=(eff_data.y_error_low, eff_data.y_error_high), fmt=data_color+'.', markersize=5) + ax.plot(x_pred, eff_mc_fitted.y_pred, mc_color+'--') + ax.fill(np.concatenate([x_pred, x_pred[::-1]]), + 
np.concatenate([eff_mc_fitted.y_pred - eff_mc_fitted.sigma_pred, + (eff_mc_fitted.y_pred + eff_mc_fitted.sigma_pred)[::-1]]), + alpha=trans, fc=mc_color, ec='None') + ax.plot(x_pred, eff_data_fitted.y_pred, data_color+'--') + ax.fill(np.concatenate([x_pred, x_pred[::-1]]), + np.concatenate([eff_data_fitted.y_pred - eff_data_fitted.sigma_pred, + (eff_data_fitted.y_pred + eff_data_fitted.sigma_pred)[::-1]]), + alpha=trans, fc=data_color, ec='None') + + title = "Turn-ons for {} trigger with {} DeepTau VSjet".format(channel, wp) + if dm != 'all': + title += " for DM={}".format(dm) + else: + title += " for all DMs" + plt.title(title, fontsize=16) + plt.xlabel("$p_T$ (GeV)", fontsize=12) + plt.ylabel("Efficiency", fontsize=12) + plt.ylim([ 0., 1.1 ]) + plt.xlim([ 20, min(200, plt.xlim()[1]) ]) + pdf.savefig(bbox_inches='tight') + plt.close() + + out_name_pattern = '{{}}_{}_{}{}_{{}}'.format(channel, wp, dm_label) + output_file.WriteTObject(eff_data_root, out_name_pattern.format('data', 'eff'), 'Overwrite') + output_file.WriteTObject(eff_mc_root, out_name_pattern.format('mc', 'eff'), 'Overwrite') + eff_data_fitted_hist = Histogram.CreateTH1(eff_data_fitted.y_pred, [x_low, x_high], + eff_data_fitted.sigma_pred, fixed_step=True) + eff_mc_fitted_hist = Histogram.CreateTH1(eff_mc_fitted.y_pred, [x_low, x_high], + eff_mc_fitted.sigma_pred, fixed_step=True) + sf_fitted_hist = eff_data_fitted_hist.Clone() + sf_fitted_hist.Divide(eff_mc_fitted_hist) + output_file.WriteTObject(eff_data_fitted_hist, out_name_pattern.format('data', 'fitted'), 'Overwrite') + output_file.WriteTObject(eff_mc_fitted_hist, out_name_pattern.format('mc', 'fitted'), 'Overwrite') + output_file.WriteTObject(sf_fitted_hist, out_name_pattern.format('sf', 'fitted'), 'Overwrite') + +output_file.Close() +print('All done.') diff --git a/TauTagAndProbe/python/runTauIdMVA.py b/TauTagAndProbe/python/runTauIdMVA.py deleted file mode 100644 index 40fd48b4555..00000000000 --- a/TauTagAndProbe/python/runTauIdMVA.py +++ /dev/null @@ -1,579 +0,0 @@ -from RecoTauTag.RecoTau.TauDiscriminatorTools import noPrediscriminants -from RecoTauTag.RecoTau.PATTauDiscriminationByMVAIsolationRun2_cff import patDiscriminationByIsolationMVArun2v1raw, patDiscriminationByIsolationMVArun2v1VLoose -import os - -class TauIDEmbedder(object): - """class to rerun the tau seq and acces trainings from the database""" - - def __init__(self, process, cms, debug = False, - toKeep = ["2016v1", "newDM2016v1"], - tauIdDiscrMVA_trainings_run2_2017 = { - 'tauIdMVAIsoDBoldDMwLT2017' : "tauIdMVAIsoDBoldDMwLT2017", - }, - tauIdDiscrMVA_WPs_run2_2017 = { - 'tauIdMVAIsoDBoldDMwLT2017' : { - 'Eff95' : "DBoldDMwLTEff95", - 'Eff90' : "DBoldDMwLTEff90", - 'Eff80' : "DBoldDMwLTEff80", - 'Eff70' : "DBoldDMwLTEff70", - 'Eff60' : "DBoldDMwLTEff60", - 'Eff50' : "DBoldDMwLTEff50", - 'Eff40' : "DBoldDMwLTEff40" - } - }, - tauIdDiscrMVA_2017_version = "v1", - conditionDB = "" # preparational DB: 'frontier://FrontierPrep/CMS_CONDITIONS' - ): - super(TauIDEmbedder, self).__init__() - self.process = process - self.cms = cms - self.debug = debug - self.process.load('RecoTauTag.Configuration.loadRecoTauTagMVAsFromPrepDB_cfi') - if len(conditionDB) != 0: - self.process.CondDBTauConnection.connect = cms.string(conditionDB) - self.process.loadRecoTauTagMVAsFromPrepDB.connect = cms.string(conditionDB) - # if debug: - # print self.process.CondDBTauConnection.connect - # print dir(self.process.loadRecoTauTagMVAsFromPrepDB) - # print self.process.loadRecoTauTagMVAsFromPrepDB.parameterNames_ - - 
self.tauIdDiscrMVA_trainings_run2_2017 = tauIdDiscrMVA_trainings_run2_2017 - self.tauIdDiscrMVA_WPs_run2_2017 = tauIdDiscrMVA_WPs_run2_2017 - self.tauIdDiscrMVA_2017_version = tauIdDiscrMVA_2017_version - self.toKeep = toKeep - - - @staticmethod - def get_cmssw_version(debug = False): - """returns 'CMSSW_X_Y_Z'""" - if debug: print "get_cmssw_version:", os.environ["CMSSW_RELEASE_BASE"].split('/')[-1] - return os.environ["CMSSW_RELEASE_BASE"].split('/')[-1] - - @classmethod - def get_cmssw_version_number(klass, debug = False): - """returns 'X_Y_Z' (without 'CMSSW_')""" - if debug: print "get_cmssw_version_number:", map(int, klass.get_cmssw_version().split("CMSSW_")[1].split("_")[0:3]) - return map(int, klass.get_cmssw_version().split("CMSSW_")[1].split("_")[0:3]) - - @staticmethod - def versionToInt(release=9, subversion=4, patch=0, debug = False): - if debug: print "versionToInt:", release * 10000 + subversion * 100 + patch - return release * 10000 + subversion * 100 + patch - - @classmethod - def is_above_cmssw_version(klass, release=9, subversion=4, patch=0, debug = False): - split_cmssw_version = klass.get_cmssw_version_number() - if klass.versionToInt(release, subversion, patch) > klass.versionToInt(split_cmssw_version[0], split_cmssw_version[1], split_cmssw_version[2]): - if debug: print "is_above_cmssw_version:", False - return False - else: - if debug: print "is_above_cmssw_version:", True - return True - - def loadMVA_WPs_run2_2017(self): - if self.debug: print "loadMVA_WPs_run2_2017: performed" - global cms - for training, gbrForestName in self.tauIdDiscrMVA_trainings_run2_2017.items(): - - self.process.loadRecoTauTagMVAsFromPrepDB.toGet.append( - self.cms.PSet( - record = self.cms.string('GBRWrapperRcd'), - tag = self.cms.string("RecoTauTag_%s%s" % (gbrForestName, self.tauIdDiscrMVA_2017_version)), - label = self.cms.untracked.string("RecoTauTag_%s%s" % (gbrForestName, self.tauIdDiscrMVA_2017_version)) - ) - ) - - for WP in self.tauIdDiscrMVA_WPs_run2_2017[training].keys(): - self.process.loadRecoTauTagMVAsFromPrepDB.toGet.append( - self.cms.PSet( - record = self.cms.string('PhysicsTGraphPayloadRcd'), - tag = self.cms.string("RecoTauTag_%s%s_WP%s" % (gbrForestName, self.tauIdDiscrMVA_2017_version, WP)), - label = self.cms.untracked.string("RecoTauTag_%s%s_WP%s" % (gbrForestName, self.tauIdDiscrMVA_2017_version, WP)) - ) - ) - - self.process.loadRecoTauTagMVAsFromPrepDB.toGet.append( - self.cms.PSet( - record = self.cms.string('PhysicsTFormulaPayloadRcd'), - tag = self.cms.string("RecoTauTag_%s%s_mvaOutput_normalization" % (gbrForestName, self.tauIdDiscrMVA_2017_version)), - label = self.cms.untracked.string("RecoTauTag_%s%s_mvaOutput_normalization" % (gbrForestName, self.tauIdDiscrMVA_2017_version)) - ) - ) - - def runTauID(self, name='NewTauIDsEmbedded'): - self.process.rerunMvaIsolationSequence = self.cms.Sequence() - tauIDSources = self.cms.PSet() - - # rerun the seq to obtain the 2017 nom training with 0.5 iso cone, old DM, ptph>1, trained on 2017MCv1 - if "2017v1" in self.toKeep: - self.tauIdDiscrMVA_2017_version = "v1" - self.tauIdDiscrMVA_trainings_run2_2017 = { - 'tauIdMVAIsoDBoldDMwLT2017' : "tauIdMVAIsoDBoldDMwLT2017", - } - self.tauIdDiscrMVA_WPs_run2_2017 = { - 'tauIdMVAIsoDBoldDMwLT2017' : { - 'Eff95' : "DBoldDMwLTEff95", - 'Eff90' : "DBoldDMwLTEff90", - 'Eff80' : "DBoldDMwLTEff80", - 'Eff70' : "DBoldDMwLTEff70", - 'Eff60' : "DBoldDMwLTEff60", - 'Eff50' : "DBoldDMwLTEff50", - 'Eff40' : "DBoldDMwLTEff40" - } - } - # update the list of available in DB samples - if not 
self.is_above_cmssw_version(11, 0, 0, self.debug): - if self.debug: print "runTauID: not is_above_cmssw_version(10, 0, 0). Will update the list of available in DB samples to access 2017v1" - self.loadMVA_WPs_run2_2017() - - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1raw = patDiscriminationByIsolationMVArun2v1raw.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - loadMVAfromDB = self.cms.bool(True), - mvaName = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1"),#RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1 writeTauIdDiscrMVAs - mvaOpt = self.cms.string("DBoldDMwLTwGJ"), - requireDecayMode = self.cms.bool(True), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose = patDiscriminationByIsolationMVArun2v1VLoose.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - toMultiplex = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1raw'), - key = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1raw:category'),#? - loadMVAfromDB = self.cms.bool(True), - mvaOutput_normalization = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_mvaOutput_normalization"), #writeTauIdDiscrMVAoutputNormalizations - mapping = self.cms.VPSet( - self.cms.PSet( - category = self.cms.uint32(0), - cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_WPEff90"), #writeTauIdDiscrWPs - variable = self.cms.string("pt"), - ) - ) - ) - - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VVLoose = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VVLoose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_WPEff95") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Loose = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Loose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_WPEff80") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Medium = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Medium.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_WPEff70") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Tight = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Tight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_WPEff60") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VTight = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_WPEff50") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VVTight = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VVTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v1_WPEff40") - - self.process.rerunMvaIsolationSequence += self.cms.Sequence( - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1raw - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose - 
*self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VVLoose - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Loose - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Medium - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1Tight - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VTight - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v1VVTight - ) - - tauIDSources.byIsolationMVArun2017v1DBoldDMwLTraw2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1raw') - tauIDSources.byVVLooseIsolationMVArun2017v1DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1VVLoose') - tauIDSources.byVLooseIsolationMVArun2017v1DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1VLoose') - tauIDSources.byLooseIsolationMVArun2017v1DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1Loose') - tauIDSources.byMediumIsolationMVArun2017v1DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1Medium') - tauIDSources.byTightIsolationMVArun2017v1DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1Tight') - tauIDSources.byVTightIsolationMVArun2017v1DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1VTight') - tauIDSources.byVVTightIsolationMVArun2017v1DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v1VVTight') - - - if "2017v2" in self.toKeep: - self.tauIdDiscrMVA_2017_version = "v2" - self.tauIdDiscrMVA_trainings_run2_2017 = { - 'tauIdMVAIsoDBoldDMwLT2017' : "tauIdMVAIsoDBoldDMwLT2017", - } - self.tauIdDiscrMVA_WPs_run2_2017 = { - 'tauIdMVAIsoDBoldDMwLT2017' : { - 'Eff95' : "DBoldDMwLTEff95", - 'Eff90' : "DBoldDMwLTEff90", - 'Eff80' : "DBoldDMwLTEff80", - 'Eff70' : "DBoldDMwLTEff70", - 'Eff60' : "DBoldDMwLTEff60", - 'Eff50' : "DBoldDMwLTEff50", - 'Eff40' : "DBoldDMwLTEff40" - } - } - - if self.debug: print "runTauID: not is_above_cmssw_version(10, 0, 0). Will update the list of available in DB samples to access 2017v2" - self.loadMVA_WPs_run2_2017() - - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2raw = patDiscriminationByIsolationMVArun2v1raw.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - loadMVAfromDB = self.cms.bool(True), - mvaName = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2"),#RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1 writeTauIdDiscrMVAs - mvaOpt = self.cms.string("DBoldDMwLTwGJ"), - requireDecayMode = self.cms.bool(True), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose = patDiscriminationByIsolationMVArun2v1VLoose.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - toMultiplex = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2raw'), - key = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2raw:category'),#? 
- loadMVAfromDB = self.cms.bool(True), - mvaOutput_normalization = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_mvaOutput_normalization"), #writeTauIdDiscrMVAoutputNormalizations - mapping = self.cms.VPSet( - self.cms.PSet( - category = self.cms.uint32(0), - cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_WPEff90"), #writeTauIdDiscrWPs - variable = self.cms.string("pt"), - ) - ), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VVLoose = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VVLoose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_WPEff95") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Loose = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Loose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_WPEff80") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Medium = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Medium.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_WPEff70") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Tight = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Tight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_WPEff60") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VTight = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_WPEff50") - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VVTight = self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VVTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2017v2_WPEff40") - - self.process.rerunMvaIsolationSequence += self.cms.Sequence( - self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2raw - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VVLoose - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Loose - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Medium - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2Tight - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VTight - *self.process.rerunDiscriminationByIsolationOldDMMVArun2017v2VVTight - ) - - tauIDSources.byIsolationMVArun2017v2DBoldDMwLTraw2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2raw') - tauIDSources.byVVLooseIsolationMVArun2017v2DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2VVLoose') - tauIDSources.byVLooseIsolationMVArun2017v2DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2VLoose') - tauIDSources.byLooseIsolationMVArun2017v2DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2Loose') - tauIDSources.byMediumIsolationMVArun2017v2DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2Medium') - 
tauIDSources.byTightIsolationMVArun2017v2DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2Tight') - tauIDSources.byVTightIsolationMVArun2017v2DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2VTight') - tauIDSources.byVVTightIsolationMVArun2017v2DBoldDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2017v2VVTight') - - if "newDM2017v2" in self.toKeep: - self.tauIdDiscrMVA_2017_version = "v2" - self.tauIdDiscrMVA_trainings_run2_2017 = { - 'tauIdMVAIsoDBnewDMwLT2017' : "tauIdMVAIsoDBnewDMwLT2017", - } - self.tauIdDiscrMVA_WPs_run2_2017 = { - 'tauIdMVAIsoDBnewDMwLT2017' : { - 'Eff95' : "DBnewDMwLTEff95", - 'Eff90' : "DBnewDMwLTEff90", - 'Eff80' : "DBnewDMwLTEff80", - 'Eff70' : "DBnewDMwLTEff70", - 'Eff60' : "DBnewDMwLTEff60", - 'Eff50' : "DBnewDMwLTEff50", - 'Eff40' : "DBnewDMwLTEff40" - } - } - - if self.debug: print "runTauID: not is_above_cmssw_version(10, 0, 0). Will update the list of available in DB samples to access newDM2017v2" - self.loadMVA_WPs_run2_2017() - - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2raw = patDiscriminationByIsolationMVArun2v1raw.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - loadMVAfromDB = self.cms.bool(True), - mvaName = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2"),#RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1 writeTauIdDiscrMVAs - mvaOpt = self.cms.string("DBnewDMwLTwGJ"), - requireDecayMode = self.cms.bool(True), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose = patDiscriminationByIsolationMVArun2v1VLoose.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - toMultiplex = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2raw'), - key = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2raw:category'),#? 
- loadMVAfromDB = self.cms.bool(True), - mvaOutput_normalization = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_mvaOutput_normalization"), #writeTauIdDiscrMVAoutputNormalizations - mapping = self.cms.VPSet( - self.cms.PSet( - category = self.cms.uint32(0), - cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_WPEff90"), #writeTauIdDiscrWPs - variable = self.cms.string("pt"), - ) - ), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VVLoose = self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VVLoose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_WPEff95") - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Loose = self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Loose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_WPEff80") - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Medium = self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Medium.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_WPEff70") - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Tight = self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Tight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_WPEff60") - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VTight = self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_WPEff50") - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VVTight = self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VVTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2017v2_WPEff40") - - self.process.rerunMvaIsolationSequence += self.cms.Sequence( - self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2raw - *self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose - *self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VVLoose - *self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Loose - *self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Medium - *self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2Tight - *self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VTight - *self.process.rerunDiscriminationByIsolationNewDMMVArun2017v2VVTight - ) - - tauIDSources.byIsolationMVArun2017v2DBnewDMwLTraw2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2raw') - tauIDSources.byVVLooseIsolationMVArun2017v2DBnewDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2VVLoose') - tauIDSources.byVLooseIsolationMVArun2017v2DBnewDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2VLoose') - tauIDSources.byLooseIsolationMVArun2017v2DBnewDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2Loose') - tauIDSources.byMediumIsolationMVArun2017v2DBnewDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2Medium') - 
tauIDSources.byTightIsolationMVArun2017v2DBnewDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2Tight') - tauIDSources.byVTightIsolationMVArun2017v2DBnewDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2VTight') - tauIDSources.byVVTightIsolationMVArun2017v2DBnewDMwLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2017v2VVTight') - - if "dR0p32017v2" in self.toKeep: - self.tauIdDiscrMVA_2017_version = "v2" - self.tauIdDiscrMVA_trainings_run2_2017 = { - 'tauIdMVAIsoDBoldDMdR0p3wLT2017' : "tauIdMVAIsoDBoldDMdR0p3wLT2017", - } - self.tauIdDiscrMVA_WPs_run2_2017 = { - 'tauIdMVAIsoDBoldDMdR0p3wLT2017' : { - 'Eff95' : "DBoldDMdR0p3wLTEff95", - 'Eff90' : "DBoldDMdR0p3wLTEff90", - 'Eff80' : "DBoldDMdR0p3wLTEff80", - 'Eff70' : "DBoldDMdR0p3wLTEff70", - 'Eff60' : "DBoldDMdR0p3wLTEff60", - 'Eff50' : "DBoldDMdR0p3wLTEff50", - 'Eff40' : "DBoldDMdR0p3wLTEff40" - } - } - - if self.debug: print "runTauID: not is_above_cmssw_version(10, 0, 0). Will update the list of available in DB samples to access dR0p32017v2" - self.loadMVA_WPs_run2_2017() - - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2raw = patDiscriminationByIsolationMVArun2v1raw.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - loadMVAfromDB = self.cms.bool(True), - mvaName = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2"), - mvaOpt = self.cms.string("DBoldDMwLTwGJ"), - requireDecayMode = self.cms.bool(True), - srcChargedIsoPtSum = self.cms.string('chargedIsoPtSumdR03'), - srcFootprintCorrection = self.cms.string('footprintCorrectiondR03'), - srcNeutralIsoPtSum = self.cms.string('neutralIsoPtSumdR03'), - srcPhotonPtSumOutsideSignalCone = self.cms.string('photonPtSumOutsideSignalConedR03'), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose = patDiscriminationByIsolationMVArun2v1VLoose.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - toMultiplex = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2raw'), - key = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2raw:category'),#? 
- loadMVAfromDB = self.cms.bool(True), - mvaOutput_normalization = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_mvaOutput_normalization"), #writeTauIdDiscrMVAoutputNormalizations - mapping = self.cms.VPSet( - self.cms.PSet( - category = self.cms.uint32(0), - cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_WPEff90"), #writeTauIdDiscrWPs - variable = self.cms.string("pt"), - ) - ), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVLoose = self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVLoose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_WPEff95") - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Loose = self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Loose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_WPEff80") - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Medium = self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Medium.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_WPEff70") - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Tight = self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Tight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_WPEff60") - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VTight = self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_WPEff50") - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVTight = self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMdR0p3wLT2017v2_WPEff40") - - self.process.rerunMvaIsolationSequence += self.cms.Sequence( - self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2raw - *self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose - *self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVLoose - *self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Loose - *self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Medium - *self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Tight - *self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VTight - *self.process.rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVTight - ) - - tauIDSources.byIsolationMVArun2017v2DBoldDMdR0p3wLTraw2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2raw') - tauIDSources.byVVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVLoose') - tauIDSources.byVLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VLoose') - tauIDSources.byLooseIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = 
self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Loose') - tauIDSources.byMediumIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Medium') - tauIDSources.byTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2Tight') - tauIDSources.byVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VTight') - tauIDSources.byVVTightIsolationMVArun2017v2DBoldDMdR0p3wLT2017 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMdR0p3MVArun2017v2VVTight') - - # 2016 training strategy(v2) - essentially the same as 2017 training strategy (v1), trained on 2016MC, old DM - currently not implemented in the tau sequence of any release - # self.process.rerunDiscriminationByIsolationOldDMMVArun2v2raw = patDiscriminationByIsolationMVArun2v1raw.clone( - # PATTauProducer = self.cms.InputTag('slimmedTaus'), - # Prediscriminants = noPrediscriminants, - # loadMVAfromDB = self.cms.bool(True), - # mvaName = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v2"),#RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1 writeTauIdDiscrMVAs - # mvaOpt = self.cms.string("DBoldDMwLTwGJ"), - # requireDecayMode = self.cms.bool(True), - # verbosity = self.cms.int32(0) - # ) - # # - # self.process.rerunDiscriminationByIsolationOldDMMVArun2v2VLoose = patDiscriminationByIsolationMVArun2v1VLoose.clone( - # PATTauProducer = self.cms.InputTag('slimmedTaus'), - # Prediscriminants = noPrediscriminants, - # toMultiplex = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v2raw'), - # key = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v2raw:category'),#? - # loadMVAfromDB = self.cms.bool(True), - # mvaOutput_normalization = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v2_mvaOutput_normalization"), #writeTauIdDiscrMVAoutputNormalizations - # mapping = self.cms.VPSet( - # self.cms.PSet( - # category = self.cms.uint32(0), - # cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v2_WPEff90"), #writeTauIdDiscrWPs - # variable = self.cms.string("pt"), - # ) - # ) - # ) - - # 2016 training strategy(v1), trained on 2016MC, old DM - if "2016v1" in self.toKeep: - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1raw = patDiscriminationByIsolationMVArun2v1raw.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - loadMVAfromDB = self.cms.bool(True), - mvaName = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1"), - mvaOpt = self.cms.string("DBoldDMwLT"), - requireDecayMode = self.cms.bool(True), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VLoose = patDiscriminationByIsolationMVArun2v1VLoose.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - toMultiplex = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1raw'), - key = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1raw:category'), - loadMVAfromDB = self.cms.bool(True), - mvaOutput_normalization = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1_mvaOutput_normalization"), - mapping = self.cms.VPSet( - self.cms.PSet( - category = self.cms.uint32(0), - cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1_WPEff90"), - variable = self.cms.string("pt"), - ) - ) - ) - - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Loose = 
self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Loose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1_WPEff80") - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Medium = self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Medium.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1_WPEff70") - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Tight = self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Tight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1_WPEff60") - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VTight = self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1_WPEff50") - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VVTight = self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VVTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBoldDMwLT2016v1_WPEff40") - - self.process.rerunMvaIsolationSequence += self.cms.Sequence( - self.process.rerunDiscriminationByIsolationOldDMMVArun2v1raw - *self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VLoose - *self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Loose - *self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Medium - *self.process.rerunDiscriminationByIsolationOldDMMVArun2v1Tight - *self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VTight - *self.process.rerunDiscriminationByIsolationOldDMMVArun2v1VVTight - ) - - tauIDSources.byIsolationMVArun2v1DBoldDMwLTraw2016 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1raw') - tauIDSources.byVLooseIsolationMVArun2v1DBoldDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1VLoose') - tauIDSources.byLooseIsolationMVArun2v1DBoldDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1Loose') - tauIDSources.byMediumIsolationMVArun2v1DBoldDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1Medium') - tauIDSources.byTightIsolationMVArun2v1DBoldDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1Tight') - tauIDSources.byVTightIsolationMVArun2v1DBoldDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1VTight') - tauIDSources.byVVTightIsolationMVArun2v1DBoldDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationOldDMMVArun2v1VVTight') - - # 2016 training strategy(v1), trained on 2016MC, new DM - if "newDM2016v1" in self.toKeep: - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1raw = patDiscriminationByIsolationMVArun2v1raw.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = noPrediscriminants, - loadMVAfromDB = self.cms.bool(True), - mvaName = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1"), - mvaOpt = self.cms.string("DBnewDMwLT"), - requireDecayMode = self.cms.bool(True), - verbosity = self.cms.int32(0) - ) - - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VLoose = patDiscriminationByIsolationMVArun2v1VLoose.clone( - PATTauProducer = self.cms.InputTag('slimmedTaus'), - Prediscriminants = 
noPrediscriminants, - toMultiplex = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1raw'), - key = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1raw:category'), - loadMVAfromDB = self.cms.bool(True), - mvaOutput_normalization = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1_mvaOutput_normalization"), - mapping = self.cms.VPSet( - self.cms.PSet( - category = self.cms.uint32(0), - cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1_WPEff90"), - variable = self.cms.string("pt"), - ) - ) - ) - - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Loose = self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Loose.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1_WPEff80") - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Medium = self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Medium.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1_WPEff70") - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Tight = self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Tight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1_WPEff60") - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VTight = self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1_WPEff50") - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VVTight = self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VLoose.clone() - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VVTight.mapping[0].cut = self.cms.string("RecoTauTag_tauIdMVAIsoDBnewDMwLT2016v1_WPEff40") - - self.process.rerunMvaIsolationSequence += self.cms.Sequence( - self.process.rerunDiscriminationByIsolationNewDMMVArun2v1raw - *self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VLoose - *self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Loose - *self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Medium - *self.process.rerunDiscriminationByIsolationNewDMMVArun2v1Tight - *self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VTight - *self.process.rerunDiscriminationByIsolationNewDMMVArun2v1VVTight - ) - - tauIDSources.byIsolationMVArun2v1DBnewDMwLTraw2016 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1raw') - tauIDSources.byVLooseIsolationMVArun2v1DBnewDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1VLoose') - tauIDSources.byLooseIsolationMVArun2v1DBnewDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1Loose') - tauIDSources.byMediumIsolationMVArun2v1DBnewDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1Medium') - tauIDSources.byTightIsolationMVArun2v1DBnewDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1Tight') - tauIDSources.byVTightIsolationMVArun2v1DBnewDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1VTight') - tauIDSources.byVVTightIsolationMVArun2v1DBnewDMwLT2016 = self.cms.InputTag('rerunDiscriminationByIsolationNewDMMVArun2v1VVTight') - - embedID = self.cms.EDProducer("PATTauIDEmbedder", - src = self.cms.InputTag('slimmedTaus'), - tauIDSources = tauIDSources - 
) - self.process.NewTauIDsEmbedded = embedID - #setattr(process, "NewTauIDsEmbedded", embedID) diff --git a/TauTagAndProbe/python/skimTuple.py b/TauTagAndProbe/python/skimTuple.py new file mode 100644 index 00000000000..b66a95ccdb4 --- /dev/null +++ b/TauTagAndProbe/python/skimTuple.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python +import argparse +from array import array +import math +import numpy as np +import os +import re +import sys +import ROOT + +parser = argparse.ArgumentParser(description='Skim full tuple.') +parser.add_argument('--input', required=True, type=str, nargs='+', help="input files") +parser.add_argument('--config', required=True, type=str, help="config with triggers description") +parser.add_argument('--selection', required=True, type=str, help="tau selection") +parser.add_argument('--output', required=True, type=str, help="output file") +parser.add_argument('--type', required=True, type=str, default='data', help="Define the sample type among the following: data, ztt_mc, zmm_mc, w_mc, ttbar_mc") +parser.add_argument('--lumiScale', required=True, type=float, default=1.0, help="LumiScale factor") +parser.add_argument('--sideband', required=True, type=str, default='signal', + help="Event level selections to define sidebands: signal, w_enriched, OS_low_mT, OS_high_mT, SS_low_mT, SS_high_mT") +parser.add_argument('--pu', required=False, type=str, default=None, + help="file with the pileup profile for the data taking period") +args = parser.parse_args() + +path_prefix = '' if 'TauTriggerTools' in os.getcwd() else 'TauTriggerTools/' +sys.path.insert(0, path_prefix + 'Common/python') +from AnalysisTypes import * +from AnalysisTools import * +import TriggerConfig +ROOT.ROOT.EnableImplicitMT(4) +ROOT.gROOT.SetBatch(True) +ROOT.gInterpreter.Declare('#include "{}TauTagAndProbe/interface/PyInterface.h"'.format(path_prefix)) + +if args.type not in ["data", "ztt_mc", "zmm_mc", "w_mc", "ttbar_mc"]: + raise RuntimeError("Invalid sample type") + +if args.sideband not in ['signal', 'w_enriched', 'OS_low_mT','OS_high_mT','SS_low_mT','SS_high_mT']: + raise RuntimeError("Invalid sideband") +sideband = args.sideband +LumiScale = args.lumiScale +input_vec = ListToStdVector(args.input) + +if args.type != 'data': + if args.pu is None: + raise RuntimeError("Pileup file should be provided for mc.") + data_pu_file = ROOT.TFile(args.pu, 'READ') + data_pu = data_pu_file.Get('pileup') + df_all = ROOT.RDataFrame('all_events', input_vec) + mc_pu = df_all.Histo1D(ROOT.RDF.TH1DModel(data_pu), 'npu') + ROOT.PileUpWeightProvider.Initialize(data_pu, mc_pu.GetPtr()) + +trig_descriptors, channel_triggers = TriggerConfig.Load(args.config) +trigger_dict, filter_dict = TriggerConfig.LoadTriggerDictionary(input_vec) +triggerMatch = ROOT.TriggerMatchProvider.Initialize() +channels = {} +for channel_name, channel_trig_descs in channel_triggers.items(): + channel_id = ParseEnum(Channel, channel_name) + channels[channel_name] = channel_id + for desc in channel_trig_descs: + if 'sample_types' in desc and args.type not in desc['sample_types']: continue + if desc['leg_types'][-1] != 'tau': continue + match_desc = ROOT.TriggerMatchProvider.MatchDescriptor() + pattern = '^{}.*'.format(desc['name']) + hlt_paths = TriggerConfig.GetMatchedTriggers(trigger_dict, pattern) + match_desc.match_mask = int(TriggerConfig.GetMatchMask(hlt_paths)) + filter_names = desc['filters'][-1] + match_desc.filter_hashes = ListToStdVector([ filter_dict[f] for f in filter_names ], elem_type='UInt_t') + if 'min_run' in desc and args.type == 'data': + 
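# Illustrative sketch (not part of the patch): the pileup block above builds
# per-event weights from the npu spectrum in data versus the one in the MC
# sample; PileUpWeightProvider (implemented elsewhere in the package) is
# presumably the bin-by-bin ratio of the two normalized distributions. A
# ROOT-free version of the same idea, assuming identical binning:
import numpy as np

def pu_weights(data_counts, mc_counts):
    data = np.asarray(data_counts, dtype=float)
    mc = np.asarray(mc_counts, dtype=float)
    data /= data.sum()                            # normalize both to unit area
    mc /= mc.sum()
    safe_mc = np.where(mc > 0, mc, 1.0)
    return np.where(mc > 0, data / safe_mc, 0.0)  # weight per npu bin for MC events

print(pu_weights([1, 4, 6, 3, 1], [2, 5, 5, 2, 1]))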
match_desc.min_run = desc['min_run'] + if 'max_run' in desc and args.type == 'data': + match_desc.max_run = desc['max_run'] + sel_name = 'selection_' + channel_name + if sel_name in desc: + if 'hltObj_pt' in desc[sel_name]: + match_desc.hltObj_pt = desc[sel_name]['hltObj_pt'] + if 'l1Tau_pt' in desc[sel_name]: + match_desc.l1Tau_pt = desc[sel_name]['l1Tau_pt'] + if 'l1Tau_hwIso' in desc[sel_name]: + match_desc.l1Tau_hwIso = desc[sel_name]['l1Tau_hwIso'] + triggerMatch.Add(channel_id, match_desc) + +selection_id = ParseEnum(TauSelection, args.selection) +df = ROOT.RDataFrame('events', input_vec) +process_id = ParseEnum(Process, args.type) +sideband_id = ParseEnum(SideBand, args.sideband) +df = df.Define('type', str(process_id)) +df = df.Define('selection', str(sideband_id)) + +if((sideband == "signal") or (sideband == "OS_low_mT")): + df = df.Filter(''' + (tau_sel & {}) != 0 && muon_pt > 27 && muon_iso < 0.1 && muon_mt < 30 + && tau_pt > 20 && abs(tau_eta) < 2.1 && tau_decayMode != 5 && tau_decayMode != 6 + && vis_mass > 40 && vis_mass < 80 && muon_charge != tau_charge + '''.format(selection_id)) ## SIGNAL REGION +elif sideband == "w_enriched": + df = df.Filter(''' + (tau_sel & {}) != 0 && muon_pt > 27 && muon_iso < 0.1 && muon_mt > 50 + && tau_pt > 20 && abs(tau_eta) < 2.1 && tau_decayMode != 5 && tau_decayMode != 6 + '''.format(selection_id)) ## (W-ENRICHED SIDEBAND) +elif sideband == "SS_low_mT": + df = df.Filter(''' + (tau_sel & {}) != 0 && muon_pt > 27 && muon_iso < 0.1 && muon_mt < 30 + && tau_pt > 20 && abs(tau_eta) < 2.1 && tau_decayMode != 5 && tau_decayMode != 6 + && vis_mass > 40 && vis_mass < 80 && muon_charge == tau_charge + '''.format(selection_id)) +elif sideband == "OS_high_mT": + df = df.Filter(''' + (tau_sel & {}) != 0 && muon_pt > 27 && muon_iso < 0.1 && muon_mt > 70 && muon_mt < 120 + && tau_pt > 20 && abs(tau_eta) < 2.1 && tau_decayMode != 5 && tau_decayMode != 6 + && vis_mass > 40 && vis_mass < 80 && muon_charge != tau_charge + '''.format(selection_id)) +elif sideband == "SS_high_mT": + df = df.Filter(''' + (tau_sel & {}) != 0 && muon_pt > 27 && muon_iso < 0.1 && muon_mt > 70 && muon_mt < 120 + && tau_pt > 20 && abs(tau_eta) < 2.1 && tau_decayMode != 5 && tau_decayMode != 6 + && vis_mass > 40 && vis_mass < 80 && muon_charge == tau_charge + '''.format(selection_id)) + + +if selection_id == TauSelection.DeepTau: + df = df.Filter('(byDeepTau2017v2p1VSmu & (1 << {})) != 0'.format(DiscriminatorWP.Tight)) + +if args.type == 'data': + df = df.Define('puWeight', "float(1.)") + df = df.Define("genEventWeight_signOnly", "0.") + df = df.Define('lumiScale', str(LumiScale)) + if sideband == "w_enriched": + df = df.Define('weight', "muon_charge != tau_charge ? 1. 
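# Illustrative sketch (not part of the patch): the filter
# (byDeepTau2017v2p1VSmu & (1 << DiscriminatorWP.Tight)) != 0 above suggests
# the discriminator branches are stored as bitmasks with one bit per working
# point (bit n set if the tau passes WP n). Decoding such a mask, with a WP
# ordering assumed here only for illustration (the real values come from
# DiscriminatorWP in the package's AnalysisTypes):
WP_BITS = {'VVVLoose': 0, 'VVLoose': 1, 'VLoose': 2, 'Loose': 3,
           'Medium': 4, 'Tight': 5, 'VTight': 6, 'VVTight': 7}

def passes_wp(mask, wp):
    return (mask & (1 << WP_BITS[wp])) != 0

mask = 0b00111111   # example tau passing everything up to and including Tight
print(passes_wp(mask, 'Tight'), passes_wp(mask, 'VTight'))   # True False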
: -1.") ## W-ENRICHED SIDEBAND FOR DATA + print("w_enriched data yield ", df.Sum("weight").GetValue()) + else: + df = df.Define('weight', "1.") +else: + if sideband == "w_enriched": + df = df.Filter('tau_charge + muon_charge == 0') ## W-ENRICHED SIDEBAND FOR MC + if args.type == 'ztt_mc': + print("Applying tau_gen_match == 5 cut to process ztt_mc in sideband {}".format(sideband)) + df = df.Filter('tau_gen_match == 5') + elif( (args.type == 'zmm_mc') or + ( ((sideband == "w_enriched") or (sideband == "signal")) + and ((args.type == 'w_mc') or (args.type == 'ttbar_mc')) ) ): + print("Applying tau_gen_match != 5 cut to process {} in sideband {}".format(args.type, sideband)) + df = df.Filter('tau_gen_match != 5') ## THIS CUT WASN'T APPLIED TO ANY OTHER "NON Z-MM" PROCESS IN ORIGINAL STUDY + df = df.Define('puWeight', "PileUpWeightProvider::GetDefault().GetWeight(npu)") + df = df.Define('genEventWeight_signOnly', "genEventWeight >= 0. ? +1. : -1.") + N_eff = float(df.Sum("genEventWeight_signOnly").GetValue()) + N_tot = float(df.Count().GetValue()) + print("N_tot = %1.2f, N_eff = %1.2f" % (N_tot, N_eff)) + df = df.Define('lumiScale', "%s * %1.2f / %1.2f" % (str(LumiScale), N_tot, N_eff)) # LumiScale = x-sec * Integ. Lumi. + print("lumiScale = %1.2f" % eval("%s * %1.2f / %1.2f" % (str(LumiScale), N_tot, N_eff))) + df = df.Define('weight', "puWeight * genEventWeight_signOnly * lumiScale") + +skimmed_branches = [ + 'type', 'selection', + 'puWeight', 'genEventWeight', 'genEventWeight_signOnly', 'lumiScale', 'weight', + 'tau_pt', 'tau_eta', 'tau_phi', 'tau_mass', 'tau_charge', 'tau_decayMode', + 'byIsolationMVArun2017v2DBoldDMwLT2017', 'byDeepTau2017v2p1VSjet' +] + +deltaRThr = 0.5 +for channel_name, channel_id in channels.items(): + pass_branch = str('pass_' + channel_name) + df = df.Define(pass_branch, '''TriggerMatchProvider::GetDefault().Pass({}, run, tau_eta, tau_phi, hlt_accept, {}, + hltObj_types, hltObj_pt, hltObj_eta, hltObj_phi, hltObj_hasPathName, filter_hltObj, filter_hash, + l1Tau_pt, l1Tau_hwIso)'''.format(channel_id, deltaRThr)) + skimmed_branches.append(pass_branch) + +df.Snapshot('events', args.output, ListToStdVector(skimmed_branches)) diff --git a/TauTagAndProbe/python/tagAndProbe_2016_cff.py b/TauTagAndProbe/python/tagAndProbe_2016_cff.py deleted file mode 100644 index 4559bdb600d..00000000000 --- a/TauTagAndProbe/python/tagAndProbe_2016_cff.py +++ /dev/null @@ -1,200 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on data" - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt - - -HLTLIST_TAG = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07"), - path2 = cms.vstring (""), - leg1 = cms.int32(13), - leg2 = cms.int32(-999) - ), -) - - -HLTLIST = cms.VPSet( - #MuTau - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTauJet20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterIsoMu19LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu18erIorSingleMu20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu19LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", 
"hltOverlapFilterSingleIsoMu19LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_LooseCombinedIsoPFTau20_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTauJet20erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19LooseCombinedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseCombinedIsoAgainstMuon", "hltOverlapFilterIsoMu19LooseCombinedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19MediumIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19MediumIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_MediumCombinedIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19MediumCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19MediumCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu19_eta2p1_TightCombinedIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erIsoTau26erL1f0L2f10QL3f19QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu19TightCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1TightCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu19TightCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - # the following ones are extra! 
- cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu20erIorSingleMu22erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterSingleIsoMu21LooseIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseIsoAgainstMuon", "hltOverlapFilterSingleIsoMu21LooseIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21MediumIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21MediumIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_MediumCombinedIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21MediumCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1MediumCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21MediumCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu21_eta2p1_TightCombinedIsoPFTau32_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu20erIsoTau26erL1f0L2f10QL3f21QL3trkIsoFiltered0p09", "hltOverlapFilterIsoMu21TightCombinedIsoPFTau32Reg"), - path2 = cms.vstring ("hltPFTau32TrackPt1TightCombinedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu21TightCombinedIsoPFTau32Reg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - ) - - -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - HLTPaths = ['HLT_IsoMu27_v*'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -## only events where slimmedMuons has exactly 1 muon -muonNumberFilter = cms.EDFilter ("muonNumberFilter", - src = cms.InputTag("slimmedMuons") -) - -## good muons for T&P -goodMuons = cms.EDFilter("PATMuonRefSelector", - src = cms.InputTag("slimmedMuons"), - cut = cms.string( - # 'pt > 5 && abs(eta) < 2.1 ' # kinematics - 'pt > 24 && abs(eta) < 2.1 ' # kinematics - '&& ( (pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - 0.5 * pfIsolationR04().sumPUPt, 0.0)) / pt() ) < 0.1 ' # isolation - '&& isMediumMuon()' # quality -- medium muon - ), - filter = cms.bool(True) -) - - - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("NewTauIDsEmbedded"), - cut = cms.string( - # 'pt > 5 && abs(eta) < 2.1 ' #kinematics - 'pt > 20 && abs(eta) < 2.1 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - #'&& tauID("byVVLooseIsolationMVArun2v1DBoldDMwLT") > 0.5 ' - '&& tauID("againstMuonTight3") > 0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - -## b jet veto : no additional b jets in the event (reject tt) -- use in sequence with -bjets = cms.EDFilter("PATJetRefSelector", - src = cms.InputTag("slimmedJets"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.4 ' #kinematics - '&& bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags") 
> 0.8484' # b tag with medium WP - ), - #filter = cms.bool(True) -) - -TagAndProbe = cms.EDFilter("TauTagAndProbeFilter", - taus = cms.InputTag("goodTaus"), - muons = cms.InputTag("goodMuons"), - met = cms.InputTag("slimmedMETs"), - useMassCuts = cms.bool(False), - electrons = cms.InputTag("slimmedElectrons"), - eleLooseIdMap = cms.InputTag("egmGsfElectronIDs:mvaEleID-Fall17-iso-V1-wpLoose"), - eleVeto = cms.bool(True), - bjets = cms.InputTag("bjets") -) - - - -patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", - patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), - triggerResults = cms.InputTag('TriggerResults', '', "HLT"), - unpackFilterLabels = cms.bool(True) - ) - -Ntuplizer = cms.EDAnalyzer("Ntuplizer", - treeName = cms.string("TagAndProbe"), - isMC = cms.bool(False), - genCollection = cms.InputTag(""), - genPartCollection = cms.InputTag(""), - muons = cms.InputTag("TagAndProbe"), - taus = cms.InputTag("TagAndProbe"), - puInfo = cms.InputTag("slimmedAddPileupInfo"), - met = cms.InputTag("slimmedMETs"), - triggerList = HLTLIST, - triggerList_tag = HLTLIST_TAG, - triggerSet = cms.InputTag("patTriggerUnpacker"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - #L1EmuTau = cms.InputTag("simCaloStage2Digis"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - -TAndPseq = cms.Sequence( - hltFilter + - muonNumberFilter + - goodMuons + - goodTaus + - bjets + - TagAndProbe -) - -NtupleSeq = cms.Sequence( - patTriggerUnpacker + - Ntuplizer -) diff --git a/TauTagAndProbe/python/tagAndProbe_2017_cff.py b/TauTagAndProbe/python/tagAndProbe_2017_cff.py deleted file mode 100644 index 8fc00775aa8..00000000000 --- a/TauTagAndProbe/python/tagAndProbe_2017_cff.py +++ /dev/null @@ -1,293 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on data" - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt - - -HLTLIST_TAG = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07"), - path2 = cms.vstring (""), - leg1 = cms.int32(13), - leg2 = cms.int32(13) - ), -) - - -HLTLIST = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_TightID_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoTightOOSCPhotonsPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24LooseChargedIsoTightOOSCPhotonsPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_SingleL1_v"), - path1 = cms.vstring 
("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackMediumChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau20_TightID_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoTightOOSCPhotonsPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackMediumChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24MediumChargedIsoTightOOSCPhotonsPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackTightChargedIsoAgainstMuon", "hltOverlapFilterIsoMu24TightChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau20_TightID_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sSingleMu22erL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoTightOOSCPhotonsPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackTightChargedIsoTightOOSCPhotonsAgainstMuon", "hltOverlapFilterIsoMu24TightChargedIsoTightOOSCPhotonsPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1LooseChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24LooseChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24LooseChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1LooseChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24LooseChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), 
- leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22erIsoTau40erL1f0L2f10QL3f20QL3trkIsoFiltered0p07"), - path2 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatchedMu22IsoTau40"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #MuTau CrossL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", 
"hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu+Tau40 - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau40_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau40_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau40TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau40MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), -) - - -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - HLTPaths = ['HLT_IsoMu27_v*'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -## only events where slimmedMuons has exactly 1 muon -muonNumberFilter = cms.EDFilter ("muonNumberFilter", - src = cms.InputTag("slimmedMuons") -) - -## good muons for T&P -goodMuons = cms.EDFilter("PATMuonRefSelector", - src = cms.InputTag("slimmedMuons"), - cut = cms.string( - # 'pt > 5 && abs(eta) < 2.1 ' # kinematics - 'pt > 24 && abs(eta) < 2.1 
' # kinematics - '&& ( (pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - 0.5 * pfIsolationR04().sumPUPt, 0.0)) / pt() ) < 0.1 ' # isolation - '&& isMediumMuon()' # quality -- medium muon - ), - filter = cms.bool(True) -) - - - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("NewTauIDsEmbedded"), - cut = cms.string( - # 'pt > 5 && abs(eta) < 2.1 ' #kinematics - 'pt > 20 && abs(eta) < 2.1 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - #'&& tauID("byVVLooseIsolationMVArun2v1DBoldDMwLT") > 0.5 ' - '&& tauID("againstMuonTight3") > 0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - -## b jet veto : no additional b jets in the event (reject tt) -- use in sequence with -bjets = cms.EDFilter("PATJetRefSelector", - src = cms.InputTag("slimmedJets"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.4 ' #kinematics - '&& bDiscriminator("pfCombinedInclusiveSecondaryVertexV2BJetTags") > 0.8484' # b tag with medium WP - ), - #filter = cms.bool(True) -) - -TagAndProbe = cms.EDFilter("TauTagAndProbeFilter", - taus = cms.InputTag("goodTaus"), - muons = cms.InputTag("goodMuons"), - met = cms.InputTag("slimmedMETs"), - useMassCuts = cms.bool(False), - electrons = cms.InputTag("slimmedElectrons"), - eleLooseIdMap = cms.InputTag("egmGsfElectronIDs:mvaEleID-Fall17-iso-V1-wpLoose"), - eleVeto = cms.bool(True), - bjets = cms.InputTag("bjets") -) - - - -patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", - patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), - triggerResults = cms.InputTag('TriggerResults', '', "HLT"), - unpackFilterLabels = cms.bool(True) - ) - -Ntuplizer = cms.EDAnalyzer("Ntuplizer", - treeName = cms.string("TagAndProbe"), - isMC = cms.bool(False), - genCollection = cms.InputTag(""), - genPartCollection = cms.InputTag(""), - muons = cms.InputTag("TagAndProbe"), - taus = cms.InputTag("TagAndProbe"), - puInfo = cms.InputTag("slimmedAddPileupInfo"), - met = cms.InputTag("slimmedMETs"), - triggerList = HLTLIST, - triggerList_tag = HLTLIST_TAG, - triggerSet = cms.InputTag("patTriggerUnpacker"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - #L1EmuTau = cms.InputTag("simCaloStage2Digis"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - -TAndPseq = cms.Sequence( - hltFilter + - muonNumberFilter + - goodMuons + - goodTaus + - bjets + - TagAndProbe -) - -NtupleSeq = cms.Sequence( - patTriggerUnpacker + - Ntuplizer -) diff --git a/TauTagAndProbe/python/tagAndProbe_cff.py b/TauTagAndProbe/python/tagAndProbe_cff.py deleted file mode 100644 index 52fe8611db6..00000000000 --- a/TauTagAndProbe/python/tagAndProbe_cff.py +++ /dev/null @@ -1,343 +0,0 @@ -import FWCore.ParameterSet.Config as cms - -print "Running on data" - -# filter HLT paths for T&P -import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt - - -HLTLIST_TAG = cms.VPSet( - #MuTau SingleL1 - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_v"), - path1 = cms.vstring 
("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07"), - path2 = cms.vstring (""), - leg1 = cms.int32(13), - leg2 = cms.int32(13) - ), -) - - - - -HLTLIST = cms.VPSet( - #Mu-Tau20 (VBF monitoring) - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_LooseChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_MediumChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackMediumChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_TightChargedIsoPFTau20_Trk1_eta2p1_SingleL_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackTightChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu-Tau35 (di-tau monitoring) - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu-Tau50 (Tau+MET monitoring) - cms.PSet ( - HLT = 
cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22erIsoTau40erL1f0L2f10QL3f24QL3trkIsoFiltered0p07"), - path2 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatchedMu22IsoTau40"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #Mu-Tau27 (signal path) - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #Mu+Tau HPS - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = 
cms.vstring ("hltHpsSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltHpsSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring 
("hltHpsSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltHpsSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltHpsSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_LooseChargedIsoPFTauHPS20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltHpsPFTau20TrackLooseChargedIsoAgainstMuon", "hltHpsOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_MediumChargedIsoPFTauHPS20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - path2 = cms.vstring ("hltHpsPFTau20TrackMediumChargedIsoAgainstMuon", "hltHpsOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_TightChargedIsoPFTauHPS20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu27TightChargedIsoPFTau20"), - path2 = cms.vstring ("hltHpsPFTau20TrackTightChargedIsoAgainstMuon", "hltHpsOverlapFilterIsoMu27TightChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #SingleTau - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_v"), - path1 = cms.vstring ("hltSelectedPFTau180MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), -) - - - - - - -hltFilter = hlt.hltHighLevel.clone( - TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"), - HLTPaths = ['HLT_IsoMu27_v*'], - andOr = cms.bool(True), # how to deal with multiple triggers: True (OR) accept if ANY is true, False (AND) accept if ALL are true - throw = cms.bool(True) #if True: throws exception if a trigger path is invalid -) - -## only events where slimmedMuons has exactly 1 muon -muonNumberFilter = cms.EDFilter ("muonNumberFilter", - src = cms.InputTag("slimmedMuons") -) - -## good muons for T&P -goodMuons = cms.EDFilter("PATMuonRefSelector", - src = cms.InputTag("slimmedMuons"), - cut = cms.string( - # 'pt > 5 && abs(eta) < 2.1 ' # kinematics - 'pt > 24 && abs(eta) < 2.1 ' # kinematics - '&& ( 
(pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - 0.5 * pfIsolationR04().sumPUPt, 0.0)) / pt() ) < 0.1 ' # isolation - '&& isMediumMuon()' # quality -- medium muon - ), - filter = cms.bool(True) -) - - - -## good taus - apply analysis selection -goodTaus = cms.EDFilter("PATTauRefSelector", - src = cms.InputTag("NewTauIDsEmbedded"), - cut = cms.string( - # 'pt > 5 && abs(eta) < 2.1 ' #kinematics - 'pt > 20 && abs(eta) < 2.1 ' #kinematics - '&& abs(charge) > 0 && abs(charge) < 2 ' #sometimes 2 prongs have charge != 1 - '&& tauID("decayModeFinding") > 0.5 ' # tau ID - #'&& (tauID("decayModeFinding") > 0.5 || tauID("decayModeFindingNewDMs") > 0.5)' # tau ID - #'&& tauID("byLooseIsolationMVArun2v1DBoldDMwLT") > 0.5 ' - '&& tauID("againstMuonTight3") > 0.5 ' # anti Muon tight - '&& tauID("againstElectronVLooseMVA6") > 0.5 ' # anti-Ele loose - ), - filter = cms.bool(True) -) - -## b jet veto : no additional b jets in the event (reject tt) -- use in sequence with -bjets = cms.EDFilter("PATJetRefSelector", - src = cms.InputTag("slimmedJets"), - cut = cms.string( - 'pt > 20 && abs(eta) < 2.4 ' #kinematics - '&& (bDiscriminator("pfDeepFlavourJetTags:probb") + bDiscriminator("pfDeepFlavourJetTags:probbb") + bDiscriminator("pfDeepFlavourJetTags:problepb")) > 0.2770 ' # b tag with medium WP - ), - #filter = cms.bool(True) -) - -TagAndProbe = cms.EDFilter("TauTagAndProbeFilter", - taus = cms.InputTag("goodTaus"), - muons = cms.InputTag("goodMuons"), - met = cms.InputTag("slimmedMETs"), - useMassCuts = cms.bool(False), - electrons = cms.InputTag("slimmedElectrons"), - eleLooseIdMap = cms.InputTag("egmGsfElectronIDs:mvaEleID-Fall17-iso-V2-wpLoose"), - eleVeto = cms.bool(True), - bjets = cms.InputTag("bjets") -) - - - -patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", - patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), - triggerResults = cms.InputTag('TriggerResults', '', "HLT"), - unpackFilterLabels = cms.bool(True) - ) - -Ntuplizer = cms.EDAnalyzer("Ntuplizer", - treeName = cms.string("TagAndProbe"), - isMC = cms.bool(False), - genCollection = cms.InputTag(""), - genPartCollection = cms.InputTag(""), - muons = cms.InputTag("TagAndProbe"), - taus = cms.InputTag("TagAndProbe"), - puInfo = cms.InputTag("slimmedAddPileupInfo"), - met = cms.InputTag("slimmedMETs"), - triggerList = HLTLIST, - triggerList_tag = HLTLIST_TAG, - triggerSet = cms.InputTag("patTriggerUnpacker"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau", "RECO"), - #L1EmuTau = cms.InputTag("simCaloStage2Digis"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - Vertexes = cms.InputTag("offlineSlimmedPrimaryVertices"), - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "TEST"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "TEST") -) - -TAndPseq = cms.Sequence( - hltFilter + - muonNumberFilter + - goodMuons + - goodTaus + - bjets + - TagAndProbe -) - -NtupleSeq = cms.Sequence( - patTriggerUnpacker + - Ntuplizer -) diff --git a/TauTagAndProbe/python/zeroBias_cff.py b/TauTagAndProbe/python/zeroBias_cff.py deleted file mode 100644 index 0ade82e799d..00000000000 --- a/TauTagAndProbe/python/zeroBias_cff.py +++ /dev/null @@ -1,574 +0,0 @@ -import FWCore.ParameterSet.Config as cms - - -#print "Running on data or mc" - -HLTLIST = cms.VPSet( - #Mu-Tau20 (VBF monitoring) - cms.PSet ( - HLT = 
cms.string("HLT_IsoMu27_LooseChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackLooseChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_MediumChargedIsoPFTau20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackMediumChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27MediumChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_TightChargedIsoPFTau20_Trk1_eta2p1_SingleL_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20"), - path2 = cms.vstring ("hltPFTau20TrackTightChargedIsoAgainstMuon", "hltOverlapFilterIsoMu27TightChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - #Mu-Tau35 (di-tau monitoring) - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24MediumChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f24QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - path2 = cms.vstring ("hltSelectedPFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsL1HLTMatchedReg", "hltOverlapFilterIsoMu24TightChargedIsoAndTightOOSCPhotonsPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - #Mu-Tau50 (Tau+MET monitoring) - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22erIsoTau40erL1f0L2f10QL3f24QL3trkIsoFiltered0p07"), - path2 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatchedMu22IsoTau40"), - leg1 = 
cms.int32(13), - leg2 = cms.int32(15) - ), - - #Mu-Tau27 (signal path) - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTau27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - - #ETau CrossL1 - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTau30_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltOverlapFilterIsoEle24WPTightGsfLooseIsoPFTau30"), - path2 = cms.vstring ("hltSelectedPFTau30LooseChargedIsolationL1HLTMatched", "hltOverlapFilterIsoEle24WPTightGsfLooseIsoPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTau30_eta2p1_CrossL1_v"), - path1 = cms.vstring 
("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltOverlapFilterIsoEle24WPTightGsfMediumIsoPFTau30"), - path2 = cms.vstring ("hltSelectedPFTau30MediumChargedIsolationL1HLTMatched", "hltOverlapFilterIsoEle24WPTightGsfMediumIsoPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTau30_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltOverlapFilterIsoEle24WPTightGsfTightIsoPFTau30"), - path2 = cms.vstring ("hltSelectedPFTau30TightChargedIsolationL1HLTMatched", "hltOverlapFilterIsoEle24WPTightGsfTightIsoPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTau30_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltOverlapFilterIsoEle24WPTightGsfLooseIsoTightOOSCPhotonsPFTau30"), - path2 = cms.vstring ("hltSelectedPFTau30LooseChargedIsolationTightOOSCPhotonsL1HLTMatched", "hltOverlapFilterIsoEle24WPTightGsfLooseIsoTightOOSCPhotonsPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTau30_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltOverlapFilterIsoEle24WPTightGsfMediumIsoTightOOSCPhotonsPFTau30"), - path2 = cms.vstring ("hltSelectedPFTau30MediumChargedIsolationTightOOSCPhotonsL1HLTMatched", "hltOverlapFilterIsoEle24WPTightGsfMediumIsoTightOOSCPhotonsPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTau30_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltOverlapFilterIsoEle24WPTightGsfTightIsoTightOOSCPhotonsPFTau30"), - path2 = cms.vstring ("hltSelectedPFTau30TightChargedIsolationTightOOSCPhotonsL1HLTMatched", "hltOverlapFilterIsoEle24WPTightGsfTightIsoTightOOSCPhotonsPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - - - - - - #Di-tau - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTau35_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau35TrackPt1MediumChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltDoublePFTau35TrackPt1MediumChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTau40_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau40TrackPt1MediumChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltDoublePFTau40TrackPt1MediumChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTau35_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau35TrackPt1TightChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltDoublePFTau35TrackPt1TightChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTau40_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau40TrackPt1TightChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltDoublePFTau40TrackPt1TightChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring 
("hltDoublePFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau40TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring ("hltDoublePFTau40TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring ("hltDoublePFTau35TrackPt1TIghtChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTau40_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltDoublePFTau40TrackPt1TightChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring ("hltDoublePFTau40TrackPt1TIghtChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - - #Tau + MET - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET90_v"), - path1 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET100_v"), - path1 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET110_v"), - path1 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET120_v"), - path1 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET130_v"), - path1 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v"), - path1 = cms.vstring ("hltSelectedPFTau50MediumChargedIsolationL1HLTMatched"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - - #SingleTau - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau100HighPtRelaxedIso_Trk50_eta2p1_1pr_v"), - path1 = cms.vstring ("hltSelectedPFTau180MediumChargedIsolationL1HLTMatched1Prong"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_v"), - path1 = cms.vstring ("hltSelectedPFTau180MediumChargedIsolationL1HLTMatched1Prong"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = cms.string("HLT_MediumChargedIsoPFTau200HighPtRelaxedIso_Trk50_eta2p1_v"), - path1 = cms.vstring ("hltSelectedPFTau200MediumChargedIsolationL1HLTMatched1Prong"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - cms.PSet ( - HLT = 
cms.string("HLT_MediumChargedIsoPFTau220HighPtRelaxedIso_Trk50_eta2p1_v"), - path1 = cms.vstring ("hltSelectedPFTau220MediumChargedIsolationL1HLTMatched1Prong"), - path2 = cms.vstring (""), - leg1 = cms.int32(15), - leg2 = cms.int32(999) - ), - - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_v"), - path1 = cms.vstring (""), - path2 = cms.vstring (""), - leg1 = cms.int32(13), - leg2 = cms.int32(-1) - ), - - cms.PSet ( - HLT = cms.string("HLT_VBF_DoubleLooseChargedIsoPFTau20_Trk1_eta2p1_v"), - path1 = cms.vstring (""), - path2 = cms.vstring (""), - leg1 = cms.int32(-1), - leg2 = cms.int32(-1) - ), - - cms.PSet ( - HLT = cms.string("HLT_VBF_DoubleMediumChargedIsoPFTau20_Trk1_eta2p1_v"), - path1 = cms.vstring (""), - path2 = cms.vstring (""), - leg1 = cms.int32(-1), - leg2 = cms.int32(-1) - ), - - cms.PSet ( - HLT = cms.string("HLT_VBF_DoubleTightChargedIsoPFTau20_Trk1_eta2p1_v"), - path1 = cms.vstring (""), - path2 = cms.vstring (""), - leg1 = cms.int32(-1), - leg2 = cms.int32(-1) - ), - - - - #Mu+Tau HPS - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27LooseChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20LooseChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27MediumChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20MediumChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27TightChargedIsolationAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20TightChargedIsoPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27LooseChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20LooseChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27MediumChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20MediumChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = 
cms.string("HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu18erTau24erIorMu20erTau24erL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - path2 = cms.vstring ("hltHpsSelectedPFTau27TightChargedIsolationTightOOSCPhotonsAgainstMuonL1HLTMatched", "hltHpsOverlapFilterIsoMu20TightChargedIsoTightOOSCPhotonsPFTau27L1Seeded"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - cms.PSet ( - HLT = cms.string("HLT_IsoMu27_LooseChargedIsoPFTauHPS20_Trk1_eta2p1_SingleL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - path2 = cms.vstring ("hltHpsPFTau20TrackLooseChargedIsoAgainstMuon", "hltHpsOverlapFilterIsoMu27LooseChargedIsoPFTau20"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - cms.PSet ( - HLT = cms.string("HLT_IsoMu24_eta2p1_MediumChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_CrossL1_v"), - path1 = cms.vstring ("hltL3crIsoL1sBigOrMuXXerIsoTauYYerL1f0L2f10QL3f20QL3trkIsoFiltered0p07", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - path2 = cms.vstring ("hltHpsSelectedPFTau35TrackPt1MediumChargedIsolationL1HLTMatchedReg", "hltHpsOverlapFilterIsoMu24MediumChargedIsoPFTau35MonitoringReg"), - leg1 = cms.int32(13), - leg2 = cms.int32(15) - ), - - - #E+Tau HPS - - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTauHPS30_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltHpsOverlapFilterIsoEle24WPTightGsfLooseIsoPFTau30"), - path2 = cms.vstring ("hltHpsSelectedPFTau30LooseChargedIsolationL1HLTMatched", "hltHpsOverlapFilterIsoEle24WPTightGsfLooseIsoPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTauHPS30_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltHpsOverlapFilterIsoEle24WPTightGsfMediumIsoPFTau30"), - path2 = cms.vstring ("hltHpsSelectedPFTau30MediumChargedIsolationL1HLTMatched", "hltHpsOverlapFilterIsoEle24WPTightGsfMediumIsoPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTauHPS30_eta2p1_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltHpsOverlapFilterIsoEle24WPTightGsfTightIsoPFTau30"), - path2 = cms.vstring ("hltHpsSelectedPFTau30TightChargedIsolationL1HLTMatched", "hltHpsOverlapFilterIsoEle24WPTightGsfTightIsoPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltHpsOverlapFilterIsoEle24WPTightGsfLooseIsoTightOOSCPhotonsPFTau30"), - path2 = cms.vstring ("hltHpsSelectedPFTau30LooseChargedIsolationTightOOSCPhotonsL1HLTMatched", "hltHpsOverlapFilterIsoEle24WPTightGsfLooseIsoTightOOSCPhotonsPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltHpsOverlapFilterIsoEle24WPTightGsfMediumIsoTightOOSCPhotonsPFTau30"), - path2 = cms.vstring ("hltHpsSelectedPFTau30MediumChargedIsolationTightOOSCPhotonsL1HLTMatched", 
"hltHpsOverlapFilterIsoEle24WPTightGsfMediumIsoTightOOSCPhotonsPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v"), - path1 = cms.vstring ("hltEle24erWPTightGsfTrackIsoFilterForTau", "hltHpsOverlapFilterIsoEle24WPTightGsfTightIsoTightOOSCPhotonsPFTau30"), - path2 = cms.vstring ("hltHpsSelectedPFTau30TightChargedIsolationTightOOSCPhotonsL1HLTMatched", "hltHpsOverlapFilterIsoEle24WPTightGsfTightIsoTightOOSCPhotonsPFTau30"), - leg1 = cms.int32(11), - leg2 = cms.int32(15) - ), - - #Di-tau HPS - - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau35TrackPt1MediumChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau35TrackPt1MediumChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTauHPS40_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau40TrackPt1MediumChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau40TrackPt1MediumChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau35TrackPt1TightChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau35TrackPt1TightChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTauHPS40_Trk1_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau40TrackPt1TightChargedIsolationDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau40TrackPt1TightChargedIsolationDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau35TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleMediumChargedIsoPFTauHPS40_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau40TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau40TrackPt1MediumChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau35TrackPt1TightChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau35TrackPt1TIghtChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - cms.PSet ( - HLT = cms.string("HLT_DoubleTightChargedIsoPFTauHPS40_Trk1_TightID_eta2p1_Reg_v"), - path1 = cms.vstring ("hltHpsDoublePFTau40TrackPt1TightChargedIsolationAndTightOOSCPhotonsDz02Reg"), - path2 = cms.vstring ("hltHpsDoublePFTau40TrackPt1TIghtChargedIsolationAndTightOOSCPhotonsDz02Reg"), - leg1 = cms.int32(15), - leg2 = cms.int32(15) - ), - - - #VBF HPS - - cms.PSet ( - HLT = cms.string("HLT_VBF_DoubleLooseChargedIsoPFTauHPS20_Trk1_eta2p1_v"), - path1 = cms.vstring (""), - path2 = cms.vstring (""), - leg1 = cms.int32(-1), - leg2 = cms.int32(-1) - ), - - cms.PSet ( - HLT = 
cms.string("HLT_VBF_DoubleMediumChargedIsoPFTauHPS20_Trk1_eta2p1_v"), - path1 = cms.vstring (""), - path2 = cms.vstring (""), - leg1 = cms.int32(-1), - leg2 = cms.int32(-1) - ), - - cms.PSet ( - HLT = cms.string("HLT_VBF_DoubleTightChargedIsoPFTauHPS20_Trk1_eta2p1_v"), - path1 = cms.vstring (""), - path2 = cms.vstring (""), - leg1 = cms.int32(-1), - leg2 = cms.int32(-1) - ) - - - -) - - - - - -ZeroBias = cms.EDAnalyzer("ZeroBias", - treeName = cms.string("ZeroBias"), - L1Tau = cms.InputTag("caloStage2Digis", "Tau"), - L1EmuTau = cms.InputTag("simCaloStage2Digis", "MP"), - l1tJetCollection = cms.InputTag("caloStage2Digis","Jet"), - l1tEmuJetCollection = cms.InputTag("simCaloStage2Digis","MP"), - L1EG = cms.InputTag("caloStage2Digis", "EGamma"), - L1EmuEG = cms.InputTag("simCaloStage2Digis", "MP"), - L1Mu = cms.InputTag("hltGtStage2Digis"), - L1EmuMu = cms.InputTag("simGtStage2Digis"), - triggerList = HLTLIST, - #triggerSet = cms.InputTag("slimmedPatTrigger"), - triggerSet = cms.InputTag("patTriggerUnpacker"), - triggerResultsLabel = cms.InputTag("TriggerResults", "", "HLT"), - L2CaloJet_L1TauSeeded_Collection = cms.InputTag("hltL2TauJetsL1IsoTauSeeded", "", "MYHLT"), - L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "MYHLT"), - L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "MYHLT"), - L2CaloJet_IsoPix_Collection = cms.InputTag("hltL2TauJetsIso", "", "MYHLT"), - PixelTrackCollection = cms.InputTag("hltPixelTracksMergedRegForTau", "", "MYHLT"), - MergedTrackCollection = cms.InputTag("hltMergedTracksTauReg", "", "MYHLT"), - PFRegCandCollection = cms.InputTag("hltParticleFlowReg", "", "MYHLT"), - AK4PFRegJetCollection = cms.InputTag("hltAK4PFJetsReg", "", "MYHLT"), - PFTauSansRefRegCollection = cms.InputTag("hltPFTausSansRefReg", "", "MYHLT"), - PFJetRegionCollection = cms.InputTag("hltTauPFJets08RegionReg", "jets", "MYHLT"), - PFJetChargedHadronAssociation = cms.InputTag("hltTauPFJetsRecoTauChargedHadronsReg", "", "MYHLT"), - JetPiZeroAssociation = cms.InputTag("hltPFTauPiZerosReg", "", "MYHLT") -) - - -NtupleZeroBiasSeq = cms.Sequence( - ZeroBias -) - - - - - -patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", - patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), - triggerResults = cms.InputTag('TriggerResults', '', "HLT"), - unpackFilterLabels = cms.bool(True) - ) - -patTriggerUnpackerSeq = cms.Sequence( - patTriggerUnpacker -) diff --git a/TauTagAndProbe/src/GenHelper.cc b/TauTagAndProbe/src/GenHelper.cc deleted file mode 100644 index 4f67f7127cb..00000000000 --- a/TauTagAndProbe/src/GenHelper.cc +++ /dev/null @@ -1,514 +0,0 @@ -/* -** -** Helpers for gen info (implementation) -** -** -** \date: 13 May 2015 -** \author: L. 
Cadamuro (LLR) -*/ - -#include -#include "TauTriggerTools/TauTagAndProbe/interface/GenHelper.h" -#include -#include -#include "DataFormats/TauReco/interface/PFTauDecayMode.h" - -bool genhelper::IsLastCopy (const reco::GenParticle& part) -{ - bool isLast = true; - int thisPdgId = part.pdgId(); - - if (abs(thisPdgId) == 25 || abs(thisPdgId) == 23 || abs(thisPdgId) == 15) // H, Z, tau must decay - if (part.numberOfDaughters() == 0) return false; // can happen to have a fake "clone" that does not decay --> reject (it is not a real "last") - - // other particles, or H/Z/tau with sons - for (unsigned int iDau = 0; iDau < part.numberOfDaughters(); iDau++) - { - const reco::Candidate * Dau = part.daughter(iDau); - bool pdgDecaying = (abs(thisPdgId) == 25 || abs(thisPdgId) == 23 || abs(thisPdgId) == 15); - if (Dau->pdgId() == thisPdgId && (Dau->numberOfDaughters() > 0 || !pdgDecaying)) // sometimes a "fake" clone is produced but not decayed - { - isLast = false; - break; - } - } - return isLast; -} - -bool genhelper::IsFirstCopy (const reco::GenParticle& part, const bool checkAbsPdg) -{ - bool isFirst = true; - int thisPdgId = part.pdgId(); - for (unsigned int iMo = 0; iMo < part.numberOfMothers(); iMo++) - { - const reco::Candidate * Mo = part.mother(iMo); - bool pdgMatch = (checkAbsPdg ? (abs(thisPdgId) == abs(Mo->pdgId())) : (thisPdgId == Mo->pdgId()) ); - if (pdgMatch) - { - isFirst = false; - break; - } - } - return isFirst; -} - - -int genhelper::GetTauDecay (const reco::Candidate* part) -{ - if (abs(part->pdgId()) != 15) return -1; // only on taus - int decay = -1; - int nele = 0; - int nmu = 0; - for (unsigned int iDau = 0; iDau < part->numberOfDaughters(); iDau++) - { - const reco::Candidate * Dau = part->daughter(iDau); - int dauId = abs(Dau->pdgId()); - if (dauId == 11) nele++; - if (dauId == 13) nmu++; - } - - if (nmu == 1 && nele == 0) decay = 0; - if (nmu == 0 && nele == 1) decay = 1; - if (nmu == 0 && nele == 0) decay = 2; - - return decay; // -1 if strange things happen -} - -int genhelper::GetTauDecay (const reco::GenParticle& part) -{ - if ( !(part.statusFlags().isLastCopy()) ) return -1; // only for last copies - const reco::Candidate* p = ∂ - return genhelper::GetTauDecay(p); -} - - - -const reco::Candidate* genhelper::GetFirstCopy (const reco::Candidate* part) -{ - int cloneInd = -1; - int id = part->pdgId(); - for (unsigned int iMot = 0; iMot < part->numberOfMothers(); iMot++) - { - const reco::Candidate * Mot = part->mother( iMot ); - if (id == Mot->pdgId()) - { - cloneInd = iMot; - break; - } - } - - if (cloneInd == -1) return part; - else return (GetFirstCopy (part->mother(cloneInd))); - -} - - - - -const reco::Candidate* genhelper::GetLastCopy (const reco::Candidate* part) -{ - int cloneInd = -1; - int id = part->pdgId(); - for (unsigned int iDau = 0; iDau < part->numberOfDaughters(); iDau++) - { - const reco::Candidate * Dau = part->daughter( iDau ); - if (id == Dau->pdgId()) - { - // check gen flags - if pdgId is the same but this particle isFirst(), then it means it is a X -> X+gamma -> X + (XX) process - const reco::GenParticle* gpDau = static_cast(Dau); - const reco::GenStatusFlags& fl = gpDau->statusFlags(); - if (fl.isFirstCopy()) continue; - - cloneInd = iDau; - break; - } - } - - if (cloneInd == -1) return part; - else return (GetLastCopy (part->daughter(cloneInd))); - -} - -genhelper::HZDecay genhelper::GetHZDecay (const reco::Candidate* part) -{ - int ntau = 0; - int nele = 0; - int nmu = 0; - - for (unsigned int iDau = 0; iDau < part->numberOfDaughters(); 
iDau++) - { - const reco::Candidate * Dau = part->daughter( iDau ); - if (abs(Dau->pdgId()) == 11 ) nele++; - if (abs(Dau->pdgId()) == 13 ) nmu++; - if (abs(Dau->pdgId()) == 15 ) - { - ntau++; - int decay = genhelper::GetTauDecay (genhelper::GetLastCopy(Dau)); - if (decay == 0) nmu++; - if (decay == 1) nele++; - } - } - - // determine decay mode - if (ntau == 0) - { - if (nele == 0 && nmu == 2) return genhelper::HZDecay::MuMuPrompt; - else if (nele == 2 && nmu == 0) return genhelper::HZDecay::EEPrompt; - else return genhelper::HZDecay::Other; - } - - else if (ntau == 2) - { - if (nmu == 0 && nele == 0) return genhelper::HZDecay::HadHad; - if (nmu == 0 && nele == 1) return genhelper::HZDecay::EHad; - if (nmu == 0 && nele == 2) return genhelper::HZDecay::EE; - if (nmu == 1 && nele == 0) return genhelper::HZDecay::MuHad; - if (nmu == 1 && nele == 1) return genhelper::HZDecay::EMu; - if (nmu == 2 && nele == 0) return genhelper::HZDecay::MuMu; - } - - return genhelper::HZDecay::Other; - -} - - - - -genhelper::WDecay genhelper::GetWDecay (const reco::Candidate* part) -{ - - if (abs(part->pdgId()) != 24) return genhelper::WDecay::other; // only on W's - - int ntau = 0; - int nele = 0; - int nmu = 0; - int nquark = 0; - - const reco::Candidate * W = genhelper::GetLastCopy(part); - - for (unsigned int iDau = 0; iDau < W->numberOfDaughters(); iDau++) - { - const reco::Candidate * Dau = W->daughter( iDau ); - if (abs(Dau->pdgId()) < 5 ) nquark++; - if (abs(Dau->pdgId()) == 11 ) nele++; - if (abs(Dau->pdgId()) == 13 ) nmu++; - if (abs(Dau->pdgId()) == 15 ) - { - ntau++; - int decay = genhelper::GetTauDecay (genhelper::GetLastCopy(Dau)); - if (decay == 0) nmu++; - if (decay == 1) nele++; - } - } - - // determine decay mode - if (nquark == 2 && (nmu+nele+ntau)==0) - return genhelper::WDecay::Had; - - else if (nquark ==0 && ntau == 0) - { - if (nele == 0 && nmu == 1) return genhelper::WDecay::MuPrompt; - else if (nele == 1 && nmu == 0) return genhelper::WDecay::EPrompt; - else return genhelper::WDecay::other; - } - - else if (nquark==0 && ntau == 1) - { - if (nmu == 0 && nele == 0) return genhelper::WDecay::TauHad; - if (nmu == 0 && nele == 1) return genhelper::WDecay::TauE; - if (nmu == 1 && nele == 0) return genhelper::WDecay::TauMu; - else return genhelper::WDecay::other; - } - - return genhelper::WDecay::other; - -} - - - - - -genhelper::WDecay genhelper::GetTopDecay (const reco::Candidate* part) -{ - - if (abs(part->pdgId()) != 6) return genhelper::WDecay::other; // only on tops - const reco::Candidate * top = genhelper::GetLastCopy(part); - - - for (unsigned int iDau = 0; iDau < top->numberOfDaughters(); iDau++) - { - const reco::Candidate * Dau = top->daughter( iDau ); - if(abs(Dau->pdgId()) == 24 ) return genhelper::GetWDecay(Dau); - - } - - return genhelper::WDecay::other; - -} - - - - -reco::GenParticle genhelper::GetTauHad (const reco::Candidate* part) -{ - if (abs(part->pdgId()) != 15) - { - reco::GenParticle fakeTauH = reco::GenParticle (0, reco::Candidate::LorentzVector(0.,0.,0.,0.), reco::Candidate::Point (0.,0.,0.), -999999, 0, true); - std::cout << "Warning: building had tau from a particle with pdgId != 15 --> dummy entry returned" << std::endl; - return fakeTauH; - } - - reco::Candidate::LorentzVector p4Had (0,0,0,0); - for (unsigned int iDau = 0; iDau < part->numberOfDaughters(); iDau++) - { - const reco::Candidate * Dau = part->daughter( iDau ); - int dauId = abs(Dau->pdgId()); - if (dauId != 12 && dauId != 14 && dauId != 16) // no neutrinos - p4Had += Dau->p4(); - } - - int sign = 
part->pdgId() / abs(part->pdgId()); - reco::GenParticle TauH = reco::GenParticle (part->charge(), p4Had, part->vertex(), sign*66615, part->status(), true); - return TauH; -} - -reco::GenParticle genhelper::GetTauHadNeutrals (const reco::Candidate* part) -{ - if (abs(part->pdgId()) != 15) - { - reco::GenParticle fakeTauH = reco::GenParticle (0, reco::Candidate::LorentzVector(0.,0.,0.,0.), reco::Candidate::Point (0.,0.,0.), -999999, 0, true); - std::cout << "Warning: building had tau from a particle with pdgId != 15 --> dummy entry returned" << std::endl; - return fakeTauH; - } - - reco::Candidate::LorentzVector p4Had (0,0,0,0); - for (unsigned int iDau = 0; iDau < part->numberOfDaughters(); iDau++) - { - const reco::Candidate * Dau = part->daughter( iDau ); - int dauId = abs(Dau->pdgId()); - if (dauId != 12 && dauId != 14 && dauId != 16 && Dau->charge()==0) // no neutrinos - p4Had += Dau->p4(); - } - - int sign = part->pdgId() / abs(part->pdgId()); - reco::GenParticle TauH = reco::GenParticle (part->charge(), p4Had, part->vertex(), sign*77715, part->status(), true); - return TauH; -} - -const reco::Candidate* genhelper::IsFromID (const reco::Candidate* part, int targetPDGId) -{ - if (abs(part->pdgId()) == targetPDGId){ - if(abs(part->pdgId()) == 5) return GetFirstCopy(part); - else return part; - } - - for (unsigned int i = 0; i < part->numberOfMothers(); i++) - { - const reco::Candidate* matchMoth = genhelper::IsFromID(part->mother(i), targetPDGId); - if ( matchMoth != NULL) return matchMoth; - } - - // nothing found, mothers finished, exiting... - return NULL; - -} - -int genhelper::GetIndexInOutput (const reco::Candidate* part, std::vector cands) -{ - int index = -1; - std::vector::const_iterator found = find(cands.begin(), cands.end(), part); - if(found != cands.end()) index = found - cands.begin(); - return index; - -} -/////////////////////////////////////////////////////// -/////////////////////////////////////////////////////// -TVector3 genhelper::ImpactParameter(const TVector3& pv, const TVector3& sv, const TLorentzVector& p4){ - TVector3 dir = (p4.Vect()).Unit(); - return (sv-pv) - ((sv-pv)*dir)*dir; -} -/////////////////////////////////////////////////////// -/////////////////////////////////////////////////////// -void genhelper::GetTausDaughters(const reco::GenParticle& tau, - reco::GenParticleRefVector& products, - bool ignoreNus, bool direct){ - - products.clear(); - if(!direct) - FindDescendents(tau, products, 1, 0); - else{ - const reco::GenParticleRefVector& daughterRefs = tau.daughterRefVector(); - for(IGR idr = daughterRefs.begin(); idr != daughterRefs.end(); ++idr ) - products.push_back(*idr); - } - if(ignoreNus){ - std::set allNus; - allNus.insert(12); - allNus.insert(14); - allNus.insert(16); - //allNus.insert(18); - reco::GenParticleRefVector tmp; - for(IGR idr=products.begin(); idr !=products.end(); ++idr) - if(allNus.find(std::abs((*idr)->pdgId()))==allNus.end()) - tmp.push_back((*idr)); - products.swap(tmp); - } -} -/////////////////////////////////////////////////////// -/////////////////////////////////////////////////////// -//copy of function from PhysicsTools/HepMCCandAlgos/interface/GenParticlesHelper.h" which allows ignore status -void genhelper::FindDescendents(const reco::GenParticle& base, - reco::GenParticleRefVector& descendents, - int status, int pdgId, - bool skipPhotonsPi0AndFSR) { - - //one form status or pdgId has to be specifed! 
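For reference, genhelper::ImpactParameter above returns the component of the PV->SV displacement that is transverse to the momentum direction. A minimal standalone sketch of the same formula (function and variable names are illustrative, not taken from the package):

import numpy as np

def impact_parameter(pv, sv, p3):
    """pv, sv: vertex positions as 3-vectors; p3: 3-momentum of the candidate."""
    n = p3 / np.linalg.norm(p3)    # unit vector along the momentum direction
    d = sv - pv                    # displacement from primary to secondary vertex
    return d - np.dot(d, n) * n    # transverse component = impact parameter vector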
- if(status<0 && pdgId==0) return; - - const reco::GenParticleRefVector& daughterRefs = base.daughterRefVector(); - - for(IGR idr = daughterRefs.begin(); idr != daughterRefs.end(); ++idr ) { - - ///Skip leptons from pi0 decays - if(skipPhotonsPi0AndFSR && (*idr)->mother(0) && (abs((*idr)->mother(0)->pdgId())==22 || abs((*idr)->mother(0)->pdgId())==111)) continue; - ///Skip electrons from FSR from muons - if(skipPhotonsPi0AndFSR && (*idr)->mother(0) && abs((*idr)->mother(0)->pdgId())==13 && abs((*idr)->pdgId())==11) continue; - ///Skip muons and electrons from FSR - if(skipPhotonsPi0AndFSR && (*idr)->mother(0) && abs((*idr)->mother(0)->pdgId())==13 && abs((*idr)->pdgId())==11) continue; - if(skipPhotonsPi0AndFSR && (*idr)->mother(0) && abs((*idr)->mother(0)->pdgId())==11 && abs((*idr)->pdgId())==13) continue; - - - if( (status<0 || (*idr)->status() == status ) && - (!pdgId || std::abs((*idr)->pdgId()) == std::abs(pdgId) )) { - descendents.push_back(*idr); - } - else FindDescendents( *(*idr), descendents, status, pdgId, skipPhotonsPi0AndFSR); - } -} -/////////////////////////////////////////////////////// -/////////////////////////////////////////////////////// -const reco::GenParticleRef genhelper::GetLeadChParticle(const reco::GenParticleRefVector& products){ - - float maxPt=0; - reco::GenParticleRef part; - std::set charged; - charged.insert(211);//pi - charged.insert(321);//K - charged.insert(11);//e - charged.insert(13);//mu - - for(IGR idr = products.begin(); idr != products.end(); ++idr ){ - if( (*idr)->pt() > maxPt && - //charged.find( std::abs( (*idr)->pdgId() ) )!=charged.end() //MB: Logix used in pure Pythia code when charge not defined - std::abs( (*idr)->charge() )>0.001 //MB: GenParts have defined charge - ){ - maxPt = (*idr)->pt(); - part = (*idr); - } - } - return part; - } -/////////////////////////////////////////////////////// -/////////////////////////////////////////////////////// -int genhelper::getDetailedTauDecayMode(const reco::GenParticleRefVector& products){ - - int tauDecayMode = reco::PFTauDecayMode::tauDecayOther; - - int numElectrons = 0; - int numMuons = 0; - int numChargedPions = 0; - int numNeutralPions = 0; - int numPhotons = 0; - int numNeutrinos = 0; - int numOtherParticles = 0; - - for(IGR idr = products.begin(); idr != products.end(); ++idr ) { - int pdg_id = std::abs((*idr)->pdgId()); - if(pdg_id == 11) numElectrons++; - else if(pdg_id == 13) numMuons++; - else if(pdg_id == 211 || pdg_id == 321 ) numChargedPions++; //Count both pi+ and K+ - else if(pdg_id == 111 || pdg_id == 130 || pdg_id == 310 ) numNeutralPions++; //Count both pi0 and K0_L/S - else if(pdg_id == 12 || - pdg_id == 14 || - pdg_id == 16) { - numNeutrinos++; - } - else if(pdg_id == 22) numPhotons++; - else { - numOtherParticles++; - } - } - if(numElectrons>1){//sometimes there are gamma->ee conversions - numPhotons += numElectrons/2; - numElectrons -= 2*(numElectrons/2); - } - - if( numOtherParticles == 0 ){ - if( numElectrons == 1 ){ - //--- tau decays into electrons - tauDecayMode = reco::PFTauDecayMode::tauDecaysElectron; - } else if( numMuons == 1 ){ - //--- tau decays into muons - tauDecayMode = reco::PFTauDecayMode::tauDecayMuon; - } else { - //--- hadronic tau decays - switch ( numChargedPions ){ - case 1 : - if( numNeutralPions != 0 ){ - tauDecayMode = reco::PFTauDecayMode::tauDecayOther; - break; - } - switch ( numPhotons ){ - case 0: - tauDecayMode = reco::PFTauDecayMode::tauDecay1ChargedPion0PiZero; - break; - case 2: - tauDecayMode = 
reco::PFTauDecayMode::tauDecay1ChargedPion1PiZero; - break; - case 4: - tauDecayMode = reco::PFTauDecayMode::tauDecay1ChargedPion2PiZero; - break; - case 6: - tauDecayMode = reco::PFTauDecayMode::tauDecay1ChargedPion3PiZero; - break; - case 8: - tauDecayMode = reco::PFTauDecayMode::tauDecay1ChargedPion4PiZero; - break; - default: - tauDecayMode = reco::PFTauDecayMode::tauDecayOther; - break; - } - break; - case 3 : - if( numNeutralPions != 0 ){ - tauDecayMode = reco::PFTauDecayMode::tauDecayOther; - break; - } - switch ( numPhotons ){ - case 0 : - tauDecayMode = reco::PFTauDecayMode::tauDecay3ChargedPion0PiZero; - break; - case 2 : - tauDecayMode = reco::PFTauDecayMode::tauDecay3ChargedPion1PiZero; - break; - case 4 : - tauDecayMode = reco::PFTauDecayMode::tauDecay3ChargedPion2PiZero; - break; - case 6 : - tauDecayMode = reco::PFTauDecayMode::tauDecay3ChargedPion3PiZero; - break; - case 8 : - tauDecayMode = reco::PFTauDecayMode::tauDecay3ChargedPion4PiZero; - break; - default: - tauDecayMode = reco::PFTauDecayMode::tauDecayOther; - break; - } - break; - } - } - } - return tauDecayMode; -} -/////////////////////////////////////////////////////// -/////////////////////////////////////////////////////// diff --git a/TauTagAndProbe/test/CreateFileList.C b/TauTagAndProbe/test/CreateFileList.C deleted file mode 100644 index 8c5b858f4bf..00000000000 --- a/TauTagAndProbe/test/CreateFileList.C +++ /dev/null @@ -1,80 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -using namespace std; - -void CreateFileList(string Dataset = "/SingleMuon/Run2016H-PromptReco-v2/MINIAOD", string Outfile = "fileList.txt", string SpaceSeparatedListOfRuns = "277305 277420") -{ - std::vector vectOfRuns; - - if(SpaceSeparatedListOfRuns!="") - { - std::stringstream ss(SpaceSeparatedListOfRuns); - - string i; - - while (ss >> i) - { - vectOfRuns.push_back(i); - - if (ss.peek() == ' ') - ss.ignore(); - } - } - - TString RmCommand = "rm "+Outfile; - gSystem->Exec(RmCommand.Data()); - - TString ExecQueryRuns = "python ./das_client.py --query=\"run dataset="+Dataset; - ExecQueryRuns += "\" --limit=0 "; - cout<<"ExecQueryRuns = "<GetFromPipe(ExecQueryRuns.Data()); - cout<<"Runs = "<> "; - ExecQuery += Outfile ; - cout<Exec(ExecQuery.Data()); - - } - } - else - { - TString ExecQuery = "python ./das_client.py --query=\"file dataset="+Dataset+" "; - ExecQuery += "\" --limit=0 >> "; - ExecQuery += Outfile ; - cout<Exec(ExecQuery.Data()); - } - - - -} diff --git a/TauTagAndProbe/test/MakeTurnOn.py b/TauTagAndProbe/test/MakeTurnOn.py deleted file mode 100644 index 8a3475ad88e..00000000000 --- a/TauTagAndProbe/test/MakeTurnOn.py +++ /dev/null @@ -1,239 +0,0 @@ -#!/usr/bin/python -# -# -# Epoch B: 273150 - 275376 -# Epoch C: 275420 - 276283 -# Epoch D: 276315 - 276811 -# -from ROOT import * -from array import array - -gStyle.SetOptStat(111111) - -epochBMinRunNumber = 273150 -epochBMaxRunNumber = 275376 - -epochCMinRunNumber = 275420 -epochCMaxRunNumber = 276283 - -epochDMinRunNumber = 276315 -epochDMaxRunNumber = 276811 - -fIn = TFile.Open('NTuple_10Ago_Riccardo.root') - -tree = fIn.Get('Ntuplizer/TagAndProbe') - -binning = [4, 8, 10, 12, 14, 16, 18, 19, 20, 21, 22, 23, 24, 26, 28, 30, 32, 35, 40, 45, 50, 60, 70, 90, 110, 140] -bins = array('d', binning) - -triggerNamesTree = fIn.Get("Ntuplizer/triggerNames") - -triggerNamesList = [] - -l1tCuts = [28, 34, 42] - -# 
hpass = TH1F ("hpass", "hpass", 75, 0, 150) -# htot = TH1F ("htot", "htot", 75, 0, 150) -hPassListHLT_SS = [] -hPassListHLT_OS = [] -hTotListHLT_SS = [] -hTotListHLT_OS = [] -hPassListL1T_SS = [] -hPassListL1T_OS = [] -hTotListL1T_SS = [] -hTotListL1T_OS = [] -hPassListL1T_Iso_SS = [] -hPassListL1T_Iso_OS = [] -hTotListL1T_Iso_SS = [] -hTotListL1T_Iso_OS = [] - -turnOnList_HLT = [] -turnOnList_L1T = [] -turnOnList_L1T_Iso = [] - -for iTrig in range (0, 6): - triggerNamesTree.GetEntry(iTrig) - triggerNamesList.append(triggerNamesTree.triggerNames.Data()) - -print "Creating histograms" - -#Preparing the Histograms -for bitIndex in range(0, len(triggerNamesList)): - hPassListHLT_OS.append(TH1F("hPassOS_"+triggerNamesList[bitIndex], "hPassOS_"+triggerNamesList[bitIndex], len(binning)-1, bins)) - hTotListHLT_OS.append(TH1F("hTotOS_"+triggerNamesList[bitIndex], "hTotOS_"+triggerNamesList[bitIndex], len(binning)-1, bins)) - hPassListHLT_SS.append(TH1F("hPassSS_"+triggerNamesList[bitIndex], "hPassSS_"+triggerNamesList[bitIndex], len(binning)-1, bins)) - hTotListHLT_SS.append(TH1F("hTotSS_"+triggerNamesList[bitIndex], "hTotSS_"+triggerNamesList[bitIndex], len(binning)-1, bins)) - turnOnList_HLT.append(TGraphAsymmErrors()) - -hPassTest_OS = TH1F("hPassTestOS", "hPassTestOS", len(binning)-1, bins) -hPassTest_SS = TH1F("hPassTestSS", "hPassTestSS", len(binning)-1, bins) -hTotTest_OS = TH1F("hTotTestOS", "hTotTestOS", len(binning)-1, bins) -hTotTest_SS = TH1F("hTotTestSS", "hTotTestSS", len(binning)-1, bins) -hTurnOnTest = TGraphAsymmErrors() - -for cutIndex in range(0, len(l1tCuts)): - hTotListL1T_OS.append(TH1F("hTotL1IOS_" + str(l1tCuts[cutIndex]), "hTotL1OS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - hPassListL1T_OS.append(TH1F("hPassL1OS_" + str(l1tCuts[cutIndex]), "hPassL1OS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - hTotListL1T_SS.append(TH1F("hTotL1SS_" + str(l1tCuts[cutIndex]), "hTotL1SS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - hPassListL1T_SS.append(TH1F("hPassL1SS_" + str(l1tCuts[cutIndex]), "hPassL1SS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - hTotListL1T_Iso_OS.append(TH1F("hTotL1IsoOS_" + str(l1tCuts[cutIndex]), "hTotL1IsoOS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - hPassListL1T_Iso_OS.append(TH1F("hPassL1IsoOS_" + str(l1tCuts[cutIndex]), "hPassL1IsoOS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - hTotListL1T_Iso_SS.append(TH1F("hTotL1IsoSS_" + str(l1tCuts[cutIndex]), "hTotL1IsoSS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - hPassListL1T_Iso_SS.append(TH1F("hPassL1IsoSS_" + str(l1tCuts[cutIndex]), "hPassL1IsoSS_"+str(l1tCuts[cutIndex]), len(binning)-1, bins)) - turnOnList_L1T.append(TGraphAsymmErrors()) - turnOnList_L1T_Iso.append(TGraphAsymmErrors()) - -print "Populating histograms" - -#Populating the histograms -for iEv in range (0, tree.GetEntries()): - tree.GetEntry(iEv) - - #if tree.RunNumber < epochDMinRunNumber and tree.RunNumber > epochDMaxRunNumber: - # continue - - if abs(tree.tauEta) > 2.1: - continue - - pt = tree.tauPt - - #HLT Plots - triggerBits = tree.tauTriggerBits - for bitIndex in range(0, len(triggerNamesList)): - if tree.isOS == True: - hTotListHLT_OS[bitIndex].Fill(pt) - if ((triggerBits >> bitIndex) & 1) == 1: - hPassListHLT_OS[bitIndex].Fill(pt) - else: - hTotListHLT_SS[bitIndex].Fill(pt) - if ((triggerBits >> bitIndex) & 1) == 1: - hPassListHLT_SS[bitIndex].Fill(pt) - - #L1 Plots - l1tPt = tree.l1tPt - for cutIndex in range (0, len(l1tCuts)): - if tree.isOS == True: - hTotListL1T_OS[cutIndex].Fill(pt) - 
hTotListL1T_Iso_OS[cutIndex].Fill(pt) - if l1tPt > l1tCuts[cutIndex] : - hPassListL1T_OS[cutIndex].Fill(pt) - if tree.l1tIso == 1: - hPassListL1T_Iso_OS[cutIndex].Fill(pt) - else : - hTotListL1T_SS[cutIndex].Fill(pt) - hTotListL1T_Iso_SS[cutIndex].Fill(pt) - if l1tPt > l1tCuts[cutIndex] : - hPassListL1T_SS[cutIndex].Fill(pt) - if tree.l1tIso == 1: - hPassListL1T_Iso_SS[cutIndex].Fill(pt) - - hltPt = tree.hltPt - - if tree.isOS == True: - hTotTestOS.Fill(pt) - if ((triggerBits >> 5) & 1) == 1: - if (hltPt > 35) and (l1tPt > 27.5): - hPassTestOS.Fill(pt) - else: - hTotTestSS.Fill(pt) - if ((triggerBits >> 5) & 1) == 1: - if (hltPt > 35) and (l1tPt > 27.5): - hPassTestSS.Fill(pt) - -print "Calculating efficiencies" - -#Calculating and saving the efficiencies - -c1 = TCanvas ("c1", "c1", 600, 600) -c1.SetGridx() -c1.SetGridy() -fOut = TFile ("turnOn.root", "recreate") - -for bitIndex in range(0, len(triggerNamesList)): - - hPassListHLT_OS[bitIndex].Add(hPassListHLT_SS[bitIndex], -1) - hTotListHLT_OS[bitIndex].Add(hTotListHLT_SS[bitIndex], -1) - - for binIndex in range(1, hPassListHLT_OS[bitIndex].GetNbinsX() - 1): - if hPassListHLT_OS[bitIndex].GetBinContent(binIndex) > hTotListHLT_OS[bitIndex].GetBinContent(binIndex): - hPassListHLT_OS[bitIndex].SetBinContent(binIndex, hTotListHLT_OS[bitIndex].GetBinContent(binIndex)) - - turnOnList_HLT[bitIndex].Divide(hPassListHLT_OS[bitIndex], hTotListHLT_OS[bitIndex], "cl=0.683 b(1,1) mode") - turnOnList_HLT[bitIndex].SetMarkerStyle(8) - turnOnList_HLT[bitIndex].SetMarkerSize(0.8) - turnOnList_HLT[bitIndex].SetMarkerColor(kRed) - turnOnList_HLT[bitIndex].GetXaxis().SetTitle("p_t (GeV)") - turnOnList_HLT[bitIndex].GetYaxis().SetRangeUser(0, 1.05) - turnOnList_HLT[bitIndex].GetYaxis().SetTitle("Efficiency") - turnOnList_HLT[bitIndex].SetTitle(triggerNamesList[bitIndex] + " turn-on curve") - turnOnList_HLT[bitIndex].Draw("AP") - c1.Update() - c1.Print("turnOn_" + triggerNamesList[bitIndex] + ".png", "png") - hTurnOn = hPassListHLT_OS[bitIndex].Clone("hTurnOn_" + triggerNamesList[bitIndex]) - hTurnOn.Divide(hTotListHLT_OS[bitIndex]) - hTurnOn.Write() - hPassListHLT_OS[bitIndex].Write() - hTotListHLT_OS[bitIndex].Write() - -for cutIndex in range(0, len(l1tCuts)): - - hPassListL1T_OS[cutIndex].Add(hPassListL1T_SS[cutIndex], -1) - hTotListL1T_OS[cutIndex].Add(hTotListL1T_SS[cutIndex], -1) - - for binIndex in range(1, hPassListL1T_OS[cutIndex].GetNbinsX() - 1): - if hPassListL1T_OS[cutIndex].GetBinContent(binIndex) > hTotListL1T_OS[cutIndex].GetBinContent(binIndex): - hPassListL1T_OS[cutIndex].SetBinContent(binIndex, hTotListL1T_OS[cutIndex].GetBinContent(binIndex)) - if hPassListL1T_Iso_OS[cutIndex].GetBinContent(binIndex) > hTotListL1T_Iso_OS[cutIndex].GetBinContent(binIndex): - hPassListL1T_Iso_OS[cutIndex].SetBinContent(binIndex, hTotListL1T_Iso_OS[cutIndex].GetBinContent(binIndex)) - turnOnList_L1T[cutIndex].Divide(hPassListL1T_OS[cutIndex], hTotListL1T_OS[cutIndex], "cl=0.683 b(1,1) mode") - turnOnList_L1T[cutIndex].SetMarkerStyle(8) - turnOnList_L1T[cutIndex].SetMarkerSize(0.8) - turnOnList_L1T[cutIndex].SetMarkerColor(kRed) - turnOnList_L1T[cutIndex].GetXaxis().SetTitle("p_t (GeV)"); - turnOnList_L1T[cutIndex].GetYaxis().SetTitle("Efficiency"); - turnOnList_L1T[cutIndex].SetTitle("L1 trigger cut " + str(l1tCuts[cutIndex]) + " turn-on curve") - turnOnList_L1T[cutIndex].Draw("AP") - c1.Update() - c1.Print("turnOnL1_" + str(l1tCuts[cutIndex]) + ".png", "png") - turnOnList_L1T_Iso[cutIndex].Divide(hPassListL1T_Iso_OS[cutIndex], hTotListL1T_Iso_OS[cutIndex], 
"cl=0.683 b(1,1) mode") - turnOnList_L1T_Iso[cutIndex].SetMarkerStyle(8) - turnOnList_L1T_Iso[cutIndex].SetMarkerSize(0.8) - turnOnList_L1T_Iso[cutIndex].SetMarkerColor(kRed) - turnOnList_L1T_Iso[cutIndex].GetXaxis().SetTitle("p_t (GeV)"); - turnOnList_L1T_Iso[cutIndex].GetYaxis().SetTitle("Efficiency"); - turnOnList_L1T_Iso[cutIndex].SetTitle("L1 trigger iso cut " + str(l1tCuts[cutIndex]) + " turn-on curve") - turnOnList_L1T_Iso[cutIndex].Draw("AP") - c1.Update() - c1.Print("turnOnL1Iso_" + str(l1tCuts[cutIndex]) + ".png", "png") - hTurnOn = hPassListL1T_OS[cutIndex].Clone("hTurnOnL1_" + str(l1tCuts[cutIndex])) - hTurnOn.Divide(hTotListL1T_OS[cutIndex]) - hTurnOn.Write() - hTurnOn_Iso = hPassListL1T_Iso_OS[cutIndex].Clone("hTurnOnL1Iso_" + str(l1tCuts[cutIndex])) - hTurnOn_Iso.Divide(hTotListL1T_Iso_OS[cutIndex]) - hTurnOn_Iso.Write() - hPassListL1T_Iso_OS[cutIndex].Write() - hTotListL1T_Iso_OS[cutIndex].Write() - hPassListL1T_OS[cutIndex].Write() - hTotListL1T_OS[cutIndex].Write() - -hPassTestOS.Add(hPassTestSS, -1) -hTotTestOS.Add(hTotTestSS, -1) - -for binIndex in range(1, hPassTestOS.GetNbinsX() - 1): - if hPassTestOS.GetBinContent(binIndex) > hTotTestOS.GetBinContent(binIndex): - hPassTestOS.SetBinContent(binIndex, hTotTestOS.GetBinContent(binIndex)) - -hTurnOnTest.Divide(hPassTestOS, hTotTestOS, "cl=0.683 b(1,1) mode") -hTurnOnTest.SetMarkerStyle(8) -hTurnOnTest.SetMarkerSize(0.8) -hTurnOnTest.SetMarkerColor(kRed) -hTurnOnTest.GetXaxis().SetTitle("p_t (GeV)") -hTurnOnTest.GetYaxis().SetRangeUser(0, 1.05) -hTurnOnTest.GetYaxis().SetTitle("Efficiency") -hTurnOnTest.SetTitle("test turn-on curve") -hTurnOnTest.Draw("AP") -c1.Update() -c1.Print("turnOnTest.png", "png") - - -raw_input() diff --git a/TauTagAndProbe/test/PtAndEtaDistribution.py b/TauTagAndProbe/test/PtAndEtaDistribution.py deleted file mode 100644 index 500c4abdb24..00000000000 --- a/TauTagAndProbe/test/PtAndEtaDistribution.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/python -# -# -# Epoch B: 273150 - 275376 -# Epoch C: 275420 - 276283 -# Epoch D: 276315 - 276811 -# -from ROOT import * -from array import array - -fIn = TFile.Open('NTuple_10Ago_Riccardo.root') - -tree = fIn.Get('Ntuplizer/TagAndProbe') - -triggerNamesTree = fIn.Get("Ntuplizer/triggerNames") - - - -print "Creating histograms" - -hPtOS = TH1F ("hPtOS", "tauPt distribution - OS pairs", 150, 0, 150) -hPtSS = TH1F ("hPtSS", "tauPt distribution - SS pairs", 150, 0, 150) - -hEtaOS = TH1F ("hEtaOS", "tauEta distribution - OS pairs", 100, -2.5, +2.5) -hEtaSS = TH1F ("hEtaSS", "tauEta distribution - SS pairs", 100, -2.5, +2.5) - -print "Populating histograms" - -#Populating the histograms -for iEv in range (0, tree.GetEntries()): - tree.GetEntry(iEv) - - #if tree.RunNumber < epochDMinRunNumber and tree.RunNumber > epochDMaxRunNumber: - # continue - - if abs(tree.tauEta) > 2.1: - continue - - tauPt = tree.tauPt - tauEta = tree.tauEta - isOS = tree.isOS - - if isOS == True: - hPtOS.Fill(tauPt) - hEtaOS.Fill(tauEta) - - if isOS == False: - hPtSS.Fill(tauPt) - hEtaSS.Fill(tauEta) - -#Calculating and saving the efficiencies - -c1 = TCanvas ("c1", "c1", 600, 600) -c1.SetGridx() -c1.SetGridy() - -hPtOS_SS = hPtOS.Clone("hPtOS_SS") -hPtOS_SS.Add(hPtSS, -1) -hPtOS_SS.SetTitle("tauPt distribution - OS-SS pairs") -hEtaOS_SS = hEtaOS.Clone("hEtaOS_SS") -hEtaOS_SS.Add(hEtaSS, -1) -hEtaOS_SS.SetTitle("tauEta distribution - OS-SS pairs") - -hPtOS.Draw() -c1.Update() -c1.Print("hPtOS.png", "png") -hPtSS.Draw() -c1.Update() -c1.Print("hPtSS.png", "png") -hPtOS_SS.Draw() -c1.Update() 
-c1.Print("hPtOS_SS.png", "png") - -hEtaOS.Draw() -c1.Update() -c1.Print("hEtaOS.png", "png") -hEtaSS.Draw() -c1.Update() -c1.Print("hEtaSS.png", "png") -hEtaOS_SS.Draw() -c1.Update() -c1.Print("hEtaOS_SS.png", "png") - -raw_input() diff --git a/TauTagAndProbe/test/ResolutionVsDR.py b/TauTagAndProbe/test/ResolutionVsDR.py deleted file mode 100644 index 7fafe3f76d6..00000000000 --- a/TauTagAndProbe/test/ResolutionVsDR.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/python - -from ROOT import * -from array import array - -gStyle.SetOptStat(111111) - -fIn = TFile.Open('NTuple_10Ago_Riccardo.root') -tree = fIn.Get('Ntuplizer/TagAndProbe') - -l1tResolution1 = TH1I("L1TriggerReso1", "L1T resolution for offline pt between 20 and 40 GeV", 100, -100, 100) -hltResolution1 = TH1I("HLTriggerReso1", "HLT resolution for offline pt between 20 and 40 GeV", 100, -100, 100) -l1tResolution2 = TH1I("L1TriggerReso2", "L1T resolution for offline pt between 40 and 100 GeV", 100, -100, 100) -hltResolution2 = TH1I("HLTriggerReso2", "HLT resolution for offline pt between 40 and 100 GeV", 100, -100, 100) -#x = resolution -#y = R -l1tResolutionVsDR = TH2I("L1TriggerResoVsDR", "L1T resolution vs DR OS+SS", 100, -100, 100, 500, -0.1, 0.6) -hltResolutionVsDR = TH2I("HLTriggerResoVsDR", "HLT resolution vs DR OS+SS", 100, -100, 100, 50, -0.1, 0.6) - -l1tResolutionVsDRSS = TH2I("L1TriggerResoVsDRSS", "L1T resolution vs DR SS", 100, -100, 100, 50, -0.1, 0.6) -hltResolutionVsDRSS = TH2I("HLTriggerResoVsDRSS", "HLT resolution vs DR SS", 100, -100, 100, 50, -0.1, 0.6) - -l1tResolutionVsDROS = TH2I("L1TriggerResoVsDROS", "L1T resolution vs DR OS", 100, -100, 100, 50, -0.1, 0.6) -hltResolutionVsDROS = TH2I("HLTriggerResoVsDROS", "HLT resolution vs DR OS", 100, -100, 100, 50, -0.1, 0.6) - -for iEv in range (0, tree.GetEntries()): - tree.GetEntry(iEv) - if tree.tauTriggerBits > 0: - ptOffline = tree.tauPt - ptL1T = tree.l1tPt - ptHLT = tree.hltPt - deltaPtL1T = ptOffline - ptL1T - deltaPtHLT = ptOffline - ptHLT - deltaRHLT = ((tree.tauEta - tree.hltEta)**2 + (tree.tauPhi - tree.hltPhi)**2)**0.5 - deltaRL1T = ((tree.tauEta - tree.l1tEta)**2 + (tree.tauPhi - tree.l1tPhi)**2)**0.5 - - #if tree.isOS == True: - if (ptOffline > 20) and (ptOffline < 40): - l1tResolution1.Fill(deltaPtL1T) - hltResolution1.Fill(deltaPtHLT) - if (ptOffline > 40) and (ptOffline < 100): - l1tResolution2.Fill(deltaPtL1T) - hltResolution2.Fill(deltaPtHLT) - - - hltResolutionVsDR.Fill(deltaPtHLT, deltaRHLT) - - l1tResolutionVsDR.Fill(deltaPtL1T, deltaRL1T) - - if tree.isOS == True : - hltResolutionVsDROS.Fill(deltaPtHLT, deltaRHLT) - l1tResolutionVsDROS.Fill(deltaPtL1T, deltaRL1T) - else : - hltResolutionVsDRSS.Fill(deltaPtHLT, deltaRHLT) - l1tResolutionVsDRSS.Fill(deltaPtL1T, deltaRL1T) - -# l1tResolution1.GetXaxis().SetTitle("ptOffline - ptL1T") -# l1tResolution1.Draw() -# c1.Update() -# c1.Print("l1tReso_40_60.pdf", "pdf") -# l1tResolution2.GetXaxis().SetTitle("ptOffline - ptL1T") -# l1tResolution2.Draw() -# c1.Update() -# c1.Print("l1tReso_60_100.pdf", "pdf") -# hltResolution1.GetXaxis().SetTitle("ptOffline - ptHLT") -# hltResolution1.Draw() -# c1.Update() -# c1.Print("hltReso_20_40.pdf", "pdf") -# hltResolution2.GetXaxis().SetTitle("ptOffline - ptHLT") -# hltResolution2.Draw() -# c1.Update() -# c1.Print("hltReso_40_100.pdf", "pdf") -# -hltResolutionVsDR.GetXaxis().SetTitle("ptOffline - ptHLT") -hltResolutionVsDR.GetYaxis().SetTitle("Delta R") -hltResolutionVsDR.Draw("COLZ") -c1.Update() -c1.Print("hltResoVsDR.pdf", "pdf") -# -# 
l1tResolutionVsDR.GetXaxis().SetTitle("ptOffline - ptL1T") -# l1tResolutionVsDR.GetYaxis().SetTitle("Delta R") -# l1tResolutionVsDR.Draw("COLZ") -# c1.Update() -# c1.Print("l1tResoVsDR.pdf", "pdf") -# -# hltResolutionVsDROS.GetXaxis().SetTitle("ptOffline - ptHLT") -# hltResolutionVsDROS.GetYaxis().SetTitle("Delta R") -# hltResolutionVsDROS.Draw("COLZ") -# c1.Update() -# c1.Print("hltResoVsDROS.pdf", "pdf") -# -# l1tResolutionVsDROS.GetXaxis().SetTitle("ptOffline - ptL1T") -# l1tResolutionVsDROS.GetYaxis().SetTitle("Delta R") -# l1tResolutionVsDROS.Draw("COLZ") -# c1.Update() -# c1.Print("l1tResoVsDROS.pdf", "pdf") -# -# hltResolutionVsDRSS.GetXaxis().SetTitle("ptOffline - ptHLT") -# hltResolutionVsDRSS.GetYaxis().SetTitle("Delta R") -# hltResolutionVsDRSS.Draw("COLZ") -# c1.Update() -# c1.Print("hltResoVsDRSS.pdf", "pdf") -# -# l1tResolutionVsDRSS.GetXaxis().SetTitle("ptOffline - ptL1T") -# l1tResolutionVsDRSS.GetYaxis().SetTitle("Delta R") -# l1tResolutionVsDRSS.Draw("COLZ") -# c1.Update() -# c1.Print("l1tResoVsDRSS.pdf", "pdf") - -raw_input() diff --git a/TauTagAndProbe/test/TriggerResolution.py b/TauTagAndProbe/test/TriggerResolution.py deleted file mode 100644 index 16c4ef5c2bc..00000000000 --- a/TauTagAndProbe/test/TriggerResolution.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/python - -from ROOT import * -from array import array - -gStyle.SetOptStat(1111) - -fIn = TFile.Open('NTuple_10Ago_Riccardo.root') -tree = fIn.Get('Ntuplizer/TagAndProbe') - -l1tResolution = TH1I("L1TriggerReso1", "L1T resolution", 200, -1, 1) -hltResolution = TH1I("HLTriggerReso1", "HLT resolution", 200, -1, 1) - -for iEv in range (0, tree.GetEntries()): - tree.GetEntry(iEv) - if tree.foundJet != 1: continue - ptOffline = tree.tauPhi - ptL1T = tree.l1tPhi - ptHLT = tree.hltPhi - if ptHLT > 0: - deltaPtHLT = (ptHLT - ptOffline)/ptOffline - hltResolution.Fill(deltaPtHLT) - if ptL1T > 0: - deltaPtL1T = (ptL1T - ptOffline)/ptOffline - l1tResolution.Fill(deltaPtL1T) - -l1tCanvas = TCanvas("l1tCanvas") -l1tResolution.GetXaxis().SetTitle("\\frac{ptL1T - ptOffline}{ptOffline}") -l1tResolution.GetYaxis().SetTitle("Events") -l1tResolution.Draw() -l1tCanvas.Update() -l1tCanvas.Print("l1tResolution.pdf", "pdf") - -hltCanvas = TCanvas("hltCanvas") -hltResolution.GetXaxis().SetTitle("\\frac{ptHLT - ptOffline}{ptOffline}") -hltResolution.GetYaxis().SetTitle("Events") -hltResolution.Draw() -hltCanvas.Update() -hltCanvas.Print("hltResolution.pdf", "pdf") - -hlt_l1t_Canvas = TCanvas("hlt_l1t_Canvas") -hltResolution.Draw() -l1tResolution.Draw("SAME") -hlt_l1t_Canvas.Update() -hlt_l1t_Canvas.Print("hlt_l1t_Resolution.pdf", "pdf") - -raw_input() diff --git a/TauTagAndProbe/test/ZeroBias_noEmul.py b/TauTagAndProbe/test/ZeroBias_noEmul.py deleted file mode 100644 index 733cbb6e9dd..00000000000 --- a/TauTagAndProbe/test/ZeroBias_noEmul.py +++ /dev/null @@ -1,109 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -from Configuration.StandardSequences.Eras import eras - -isMC = False - -process = cms.Process("ZeroBias",eras.Run2_2017) -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -process.load('Configuration.StandardSequences.RawToDigi_Data_cff') -process.load('Configuration.StandardSequences.EndOfProcess_cff') -process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') -process.load('Configuration.StandardSequences.Services_cff') 
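The resolution scripts above compare offline and trigger objects with deltaR = sqrt(deltaEta^2 + deltaPhi^2) and plot the pt difference, absolute or relative to the offline pt. A minimal standalone sketch of those quantities; unlike the deleted scripts it wraps delta-phi into [-pi, pi), which is usually wanted when the two phi values straddle the +/-pi boundary:

import math

def delta_r(eta1, phi1, eta2, phi2):
    # wrap delta-phi into [-pi, pi) before combining with delta-eta
    dphi = (phi1 - phi2 + math.pi) % (2.0 * math.pi) - math.pi
    return math.hypot(eta1 - eta2, dphi)

def relative_pt_resolution(pt_trigger, pt_offline):
    return (pt_trigger - pt_offline) / pt_offline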
-process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -process.load('FWCore.MessageService.MessageLogger_cfi') -process.load('Configuration.EventContent.EventContent_cff') -process.load('Configuration.Geometry.GeometryExtended2016Reco_cff') -process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff') - -process.load('Configuration.StandardSequences.Services_cff') -process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -process.load('FWCore.MessageService.MessageLogger_cfi') -process.load('Configuration.EventContent.EventContent_cff') -process.load('SimGeneral.MixingModule.mixNoPU_cfi') -process.load('Configuration.Geometry.GeometryExtended2016Reco_cff') -process.load('Configuration.StandardSequences.MagneticField_cff') -process.load('Configuration.StandardSequences.RawToDigi_cff') -process.load('Configuration.StandardSequences.EndOfProcess_cff') -process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') - - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple_ZB.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - - - -if not isMC: - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '92X_dataRun2_HLT_v3' - process.load('TauTagAndProbe.TauTagAndProbe.zeroBias_cff') - process.source = cms.Source("PoolSource", - #fileNames = cms.untracked.vstring("/store/data/Run2017B/HLTPhysics/MINIAOD/PromptReco-v1/000/297/101/00000/06603FD2-2457-E711-9D4C-02163E014329.root" - fileNames = cms.untracked.vstring("/store/data/Run2017B/SingleElectron/MINIAOD/PromptReco-v1/000/297/046/00000/02CBE6D1-4456-E711-82F5-02163E019D97.root" - ), - ) - - - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - - - -process.ZeroBias.L1Tau = cms.InputTag("hltGtStage2Digis", "Tau", "MYHLT") -process.ZeroBias.l1tJetCollection = cms.InputTag("hltGtStage2Digis", "Jet", "MYHLT") -process.ZeroBias.triggerSet = cms.InputTag("selectedPatTriggerCustom","","MYHLT") -process.ZeroBias.triggerResultsLabel = cms.InputTag("TriggerResults", "", "MYHLT") -process.ZeroBias.L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolationL1TauSeeded", "", "MYHLT") -process.ZeroBias.L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducerL1TauSeeded", "", "MYHLT") -#process.ZeroBias.triggerSet = 
cms.InputTag("patTriggerUnpacker") - -process.p = cms.Path ( - #process.patTriggerUnpackerSeq + - process.NtupleZeroBiasSeq -) - - - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) - diff --git a/TauTagAndProbe/test/convertTreeForFitting.py b/TauTagAndProbe/test/convertTreeForFitting.py deleted file mode 100644 index 4e0c94b0b56..00000000000 --- a/TauTagAndProbe/test/convertTreeForFitting.py +++ /dev/null @@ -1,203 +0,0 @@ -from ROOT import * -import numpy as n - -MC = True -DYJets = True # False means WJet enriched cuts will be used, True means cuts for DYJet enriched samples will be used - -if MC: - saveOnlyOS = True # True; save only OS, False; save both and store weight for bkg sub - if DYJets: tauGenMatching = True - if not DYJets: tauGenMatching = False - excludeLumiSections = False - print "==> OS events are stored and tau gen matching is applied for MC samples! <==" -else: - saveOnlyOS = False # True; save only OS, False; save both and store weight for bkg sub - tauGenMatching = False - excludeLumiSections = True - print "==> SS events are stored as weights and applied to suppress the bkg for Data samples! <==" - print "==> The additional lumi sections in \"Final/Cert_294927-306462_13TeV_PromptReco_Collisions17_JSON.txt\" compared to \"ReReco/Cert_294927-306462_13TeV_EOY2017ReReco_Collisions17_JSON.txt\" are removed for Data samples! <==" - -disabledPScolumns = False # True; to remove the disabled columns, False; to consider all columns - -# the hadd of all the output ntuples -path = "/eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/" -#Final files for the second round of SFs -if MC: - if DYJets: - fname = path + "NTuple_DYJetsToLL_12Apr2018_v1Andext1v1_12062018_PU_1000binMC.root" - if not DYJets: - fname = path + "NTuple_0WJets_12Apr2018_12062018_PU_1000binMC.root" -else: - fname = path + "NTuple_Data_Run2017BCDEF_31Mar2018_12062018.root" - -#Final files for the first round of SFs -#path = "/afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/" -#fname = path + "NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018.root" -#fname = path +"NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_nomPlusExt_14_01_2018_PU.root" - -#pt = [20, 26, 30, 34] -pt = [20, 26, 30, 32, 34] -numberOfHLTTriggers = 23 -numberOfHLTTriggersForFit = 26 - - -####################################################### -fIn = TFile.Open(fname) -tIn = fIn.Get('Ntuplizer/TagAndProbe') -tTriggerNames = fIn.Get("Ntuplizer/triggerNames") -if MC: - suppressionType = "OStauGenMatched" -else: - suppressionType = "SSsubtraction" -if DYJets: - outname = fname.replace ('.root', '_' + suppressionType + 'TightWPold2017v1_forFit2.root') -else: - outname = fname.replace ('.root', '_' + suppressionType + '_WjetEnriched_MediumWP2017v2_forFit.root') -fOut = TFile (outname, 'recreate') -tOut = tIn.CloneTree(0) -tOutNames = tTriggerNames.CloneTree(-1) # copy all - -briso = [n.zeros(1, dtype=int) for x in range (0, len(pt))] -brnoiso = [n.zeros(1, dtype=int) for x in range (0, len(pt))] -bkgSubW = n.zeros(1, dtype=float) -bkgSubANDpuW = n.zeros(1, dtype=float) - -hltPathTriggered_OS = [n.zeros(1, dtype=int) for x in range (0, numberOfHLTTriggersForFit+1)] - -for i in range (0, len(pt)): - name = ("hasL1_" + str(pt[i])) - tOut.Branch(name, brnoiso[i], name+"/I") - name += "_iso" - tOut.Branch(name, briso[i], 
name+"/I") - -for i in range (0, numberOfHLTTriggersForFit): - tTriggerNames.GetEntry(i) - if(i < numberOfHLTTriggers): - name = ("hasHLTPath_" + str(i)) - elif(i==numberOfHLTTriggers): - name = ("hasHLTetauPath_13")# _IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30") - elif(i==numberOfHLTTriggers+1): - name = ("hasHLTmutauPath_13") #_IsoMu20_LooseChargedIsoPFTau27_plusL1Tau32") - elif(i==numberOfHLTTriggers+2): - name = ("hasHLTditauPath_9or10or11")#_TightTau35TightIDor_MediumTau35TightIDplusHLTTau40or_TightTau35plusHLTTau40_plusL1Tau32") - tOut.Branch(name, hltPathTriggered_OS[i], name+"/I") - -#tOut.Branch("isoHLT", hltPathTriggered_OS[6], name+"/I") - -tOut.Branch("bkgSubW", bkgSubW, "bkgSubW/D") -tOut.Branch("bkgSubANDpuW", bkgSubANDpuW, "bkgSubANDpuW/D") - -nentries = tIn.GetEntries() -for ev in range (0, nentries): - tIn.GetEntry(ev) - if (ev%10000 == 0) : print ev, "/", nentries - - if abs(tIn.tauEta) > 2.1: - continue - - if saveOnlyOS and not tIn.isOS: - continue - - for i in range (0, len(pt)): - briso[i][0] = 0 - brnoiso[i][0] = 0 - - for i in range (0, numberOfHLTTriggersForFit): - hltPathTriggered_OS[i][0] = 0 - - L1iso = True if tIn.l1tIso == 1 else False - L1pt = tIn.l1tPt - for i in range(0, len(pt)): - # print L1pt, pt[i] - # - if L1pt > pt[i]: - brnoiso[i][0] = 1 - # print "SUCCESS!! ", brnoiso[i] - if L1iso: - briso[i][0] = 1 - - triggerBits = tIn.tauTriggerBits - HLTpt = tIn.hltPt - RunNumber = tIn.RunNumber - lumi = tIn.lumi - - for bitIndex in range(0, numberOfHLTTriggers): - import itertools as it - if bitIndex in it.chain(range(6, 13), range(19, 23)): # apply this L1 cut only for di-tau triggers - if (bitIndex==9 or bitIndex==10): - if ((triggerBits >> bitIndex) & 1) == 1 and (L1pt>=32) and HLTpt[bitIndex]>=40: - hltPathTriggered_OS[bitIndex][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - else: - if ((triggerBits >> bitIndex) & 1) == 1 and (L1pt>=32): - hltPathTriggered_OS[bitIndex][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - else: - if ((triggerBits >> bitIndex) & 1) == 1: - hltPathTriggered_OS[bitIndex][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - - if(bitIndex==13): - if (((triggerBits >> bitIndex) & 1) == 1 and L1pt>=26 and L1iso>0 and HLTpt[bitIndex]>=30): - hltPathTriggered_OS[numberOfHLTTriggers][0] = 1 # this is the path for etau trigger. So (L1iso) should be applied here! - else: - hltPathTriggered_OS[numberOfHLTTriggers][0] = 0 - - if ((triggerBits >> bitIndex) & 1) == 1: - hltPathTriggered_OS[numberOfHLTTriggers+1][0] = 1 # this is the path for mutau trigger. So no extra requirement is needed like: L1pt and L1iso and HLTpt - else: - hltPathTriggered_OS[numberOfHLTTriggers+1][0] = 0 - - if (((((triggerBits >> 9) & 1) == 1 and HLTpt[9]>=40) or (((triggerBits >> 10) & 1) == 1 and HLTpt[10]>=40) or (((triggerBits >> 11) & 1) == 1)) and L1pt>=32): - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 1 # this is the path for di-tau trigger. HLTpt cut is required to have the same threshold on tau + L1Pt is needed due to L1 matching differences between MC and Data - else: - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 0 - - bkgSubW[0] = 1. if tIn.isOS else -1. 
- #if (L1pt > 26) and (L1iso) and (HLTpt > 32) and (((triggerBits >> 2) & 1) == 1): - # hltPathTriggered_OS[6][0] = 1 - #else: - # hltPathTriggered_OS[6][0] = 0 - - if not "Data" in fname: - puweight = tIn.puweight - else: - puweight = 1 - - bkgSubANDpuW[0] = bkgSubW[0]*puweight - - if(tIn.byTightIsolationMVArun2017v1DBoldDMwLT2017 > 0.5): - #Mass cuts, mt and mvis for DY Jets - if DYJets: - if(tIn.mT < 30 and tIn.mVis >40 and tIn.mVis < 80): - if(tauGenMatching): #for tau gen matching - if(tIn.tau_genindex > 0): - tOut.Fill() - elif(excludeLumiSections): # for removing the extra lumi sections in Final golden JSON file compared to Rereco ones - if not ((RunNumber==299480 and lumi==7) or (RunNumber==301397 and lumi==518)): - tOut.Fill() - else: - tOut.Fill() - #High mT requirement for WJets - elif not DYJets: - if(tIn.mT > 30): - if(tauGenMatching): #for tau gen matching - if(tIn.tau_genindex > 0): - tOut.Fill() - elif(excludeLumiSections): # for removing the extra lumi sections in Final golden JSON file compared to Rereco ones - if not ((RunNumber==299480 and lumi==7) or (RunNumber==301397 and lumi==518)): - tOut.Fill() - else: - tOut.Fill() -""" -if(disabledPScolumns): # for removing the disabled PS columns: - if((RunNumber<305177 and PS_column>=2) or (RunNumber>=305178 and RunNumber<=305387 and PS_column>=2 and PS_column!=10) or (RunNumber>=305388 and PS_column>=3 and PS_column!=11 and PS_column!=12)): - tOut.Fill() -""" - -tOutNames.Write() -tOut.Write() -fOut.Close() diff --git a/TauTagAndProbe/test/convertTreeForFitting2016.py b/TauTagAndProbe/test/convertTreeForFitting2016.py deleted file mode 100644 index 89fb9895f58..00000000000 --- a/TauTagAndProbe/test/convertTreeForFitting2016.py +++ /dev/null @@ -1,197 +0,0 @@ -from ROOT import * -import numpy as n - -MC = True -DYJets = True # False means WJet enriched cuts will be used, True means cuts for DYJet enriched samples will be used - - -if MC: - saveOnlyOS = True # True; save only OS, False; save both and store weight for bkg sub - if DYJets: tauGenMatching = True - if not DYJets: tauGenMatching = False - excludeLumiSections = False - print "==> OS events are stored and tau gen matching is applied for MC samples! <==" -else: - saveOnlyOS = False # True; save only OS, False; save both and store weight for bkg sub - tauGenMatching = False - excludeLumiSections = False - print "==> SS events are stored as weights and applied to suppress the bkg for Data samples! 
<==" - -# the hadd of all the output ntuples -#path = "/eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples_AFS/Samples2016/syncronisedSamples/2017_11_23/" -path = "/eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples2016_Legacy94X/190226/" - -#fname = "./NTuple_SingleMu_DYMC_2016.root" -fname = path + "Ntuple_DYJets_RunIISummer16MiniAODv3_94X_mcRun2_ext1ANDext2-v2_190306_PUreweight.root" -#fname = path +"Ntuple_SingleMuon_Run2016BtoH-17Jul2018_190228.root" -#fname = path + "Ntuple_SingleMuon_Run2016G-17Jul2018-v1_190226.root" - - -#pt = [20, 26, 30, 34] -pt = [20, 26, 30, 32, 34] -numberOfHLTTriggers = 10 # the last one for an extra path to be filled with the events passes trigger=3 for 2016G corresponding mediumiso and trigger=4 for 2016H for McombinedIso -numberOfHLTTriggersForFit = 14 - -saveOnlyOS = False # True; save only OS, False: save both and store weight for bkg sub -print fname -####################################################### -fIn = TFile.Open(fname) -tIn = fIn.Get('Ntuplizer/TagAndProbe') -tTriggerNames = fIn.Get("Ntuplizer/triggerNames") -if MC: - suppressionType = "OStauGenMatched" -else: - suppressionType = "SSsubtraction" - -if DYJets: - outname = fname.replace ('.root', '_' + suppressionType + '_VVLooseWP2017v2_forFit.root') -else: - outname = fname.replace ('.root', '_' + suppressionType + '_WjetEnriched_MediumWP2017v2_forFit.root') - -fOut = TFile (outname, 'recreate') -tOut = tIn.CloneTree(0) -tOutNames = tTriggerNames.CloneTree(-1) # copy all - -briso = [n.zeros(1, dtype=int) for x in range (0, len(pt))] -brnoiso = [n.zeros(1, dtype=int) for x in range (0, len(pt))] -bkgSubW = n.zeros(1, dtype=float) -bkgSubANDpuW = n.zeros(1, dtype=float) -#tauPt_35GeV = n.zeros(1, dtype=bool) -#tauPt_27GeV = n.zeros(1, dtype=bool) - -hltPathTriggered_OS = [n.zeros(1, dtype=int) for x in range (0, numberOfHLTTriggersForFit+1)] - -for i in range (0, len(pt)): - name = ("hasL1_" + str(pt[i])) - tOut.Branch(name, brnoiso[i], name+"/I") - name += "_iso" - tOut.Branch(name, briso[i], name+"/I") - -for i in range (0, numberOfHLTTriggersForFit): - tTriggerNames.GetEntry(i) - if(i < numberOfHLTTriggers): - name = ("hasHLTPath_" + str(i)) - elif(i==numberOfHLTTriggers): - name = ("hasHLTmutauPath_0") #HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v - elif(i==numberOfHLTTriggers+1): - name = ("hasHLTmutauPath_1") # HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v - elif(i==numberOfHLTTriggers+2): - name = ("hasHLTetauPath_0and1")# mutau_0and1_and_plusL1Tau26andHLTTau30") - elif(i==numberOfHLTTriggers+3): - name = ("hasHLTditauPath_3or4") #HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v ===== HLT_3=> HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v for MC - - tOut.Branch(name, hltPathTriggered_OS[i], name+"/I") - -tOut.Branch("bkgSubW", bkgSubW, "bkgSubW/D") -tOut.Branch("bkgSubANDpuW", bkgSubANDpuW, "bkgSubANDpuW/D") - - -nentries = tIn.GetEntries() -for ev in range (0, nentries): - tIn.GetEntry(ev) - if (ev%10000 == 0) : print ev, "/", nentries - - if abs(tIn.tauEta) > 2.1: - continue - - PS_column = tIn.PS_column - RunNumber = tIn.RunNumber - - if saveOnlyOS and not tIn.isOS: - continue - - for i in range (0, len(pt)): - briso[i][0] = 0 - brnoiso[i][0] = 0 - - for i in range (0, numberOfHLTTriggers): - hltPathTriggered_OS[i][0] = 0 - - L1iso = True if tIn.l1tIso > 0 else False - L1pt = tIn.l1tPt - for i in range(0, len(pt)): - # print L1pt, pt[i] - # - if L1pt > pt[i]: - brnoiso[i][0] = 1 - # print "SUCCESS!! 
", brnoiso[i] - if L1iso: - briso[i][0] = 1 - - - triggerBits = tIn.tauTriggerBits - HLTpt = tIn.hltPt - - for bitIndex in range(0, numberOfHLTTriggers): - if bitIndex==0: - if ((triggerBits >> bitIndex) & 1) == 1: # and (HLTpt>=27): # this is the path for mutau trigger. So no extra requirement is needed like: L1pt and L1iso and HLTpt - hltPathTriggered_OS[bitIndex][0] = 1 - hltPathTriggered_OS[numberOfHLTTriggers][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - hltPathTriggered_OS[numberOfHLTTriggers][0] = 0 - elif bitIndex==1: - if ((triggerBits >> bitIndex) & 1) == 1: # and (HLTpt>=27): # this is the path for mutau trigger. So no extra requirement is needed like: L1pt and L1iso and HLTpt - hltPathTriggered_OS[bitIndex][0] = 1 - hltPathTriggered_OS[numberOfHLTTriggers+1][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - hltPathTriggered_OS[numberOfHLTTriggers+1][0] = 0 - else: - if ((triggerBits >> bitIndex) & 1) == 1: - hltPathTriggered_OS[bitIndex][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - - if not MC: - if ((((RunNumber < 276215 and ((triggerBits >> 1) & 1) == 1)) or (RunNumber > 276215 and RunNumber < 278270 and ((triggerBits >> 0) & 1) == 1)) or (RunNumber > 278270 and ((triggerBits >> 0) & 1) == 1 and HLTpt[0] >=30 and L1pt >= 26 and L1iso>0)): - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 1 # this is the path for e-tau trigger. HLTpt and L1 cuts are required to have the same threshold on tau. Different mutau paths are used for etau meausurment for different runs - else: - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 0 - elif MC: - if ((((triggerBits >> 1) & 1) == 1)): - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 1 # this is the path for e-tau trigger. No HLTpt and L1 cuts are required to have the same threshold on tau. Only one mutau path is used for etau meausurments in MC, since OR also gives the same efficiencies - else: - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 0 - - if not MC: - if ((("2016H" in fname) and ((triggerBits >> 4) & 1) == 1) or (("2016H" not in fname) and ((triggerBits >> 3) & 1) == 1 ) and (HLTpt[3]>=35)): #just run once more!!! I modified this for HLT35GeV but did not produce new results!!! - hltPathTriggered_OS[numberOfHLTTriggers+3][0] = 1 - else: - hltPathTriggered_OS[numberOfHLTTriggers+3][0] = 0 - #bitIndex==4: # should be 4 for 2016H and 3 for 2016G - elif MC: - if (((((triggerBits >> 4) & 1) == 1) or (((triggerBits >> 3) & 1) == 1)) and (HLTpt[3]>=35)): - hltPathTriggered_OS[numberOfHLTTriggers+3][0] = 1 - else: - hltPathTriggered_OS[numberOfHLTTriggers+3][0] = 0 - - if not "Run" in fname: - puweight = tIn.puweight - else: - puweight = 1 - bkgSubW[0] = 1. if tIn.isOS else -1. 
- - - if(tIn.byVVLooseIsolationMVArun2017v2DBoldDMwLT2017 > 0.5): - #Mass cuts, mt and mvis for DY Jets - if DYJets: - if(tIn.mT < 30 and tIn.mVis >40 and tIn.mVis < 80): - if(tauGenMatching): #for tau gen matching - if(tIn.tau_genindex > 0): - tOut.Fill() - else: - tOut.Fill() - #High mT requirement for WJets - elif not DYJets: - if(tIn.mT > 30): - if(tauGenMatching): #for tau gen matching - if(tIn.tau_genindex > 0): - tOut.Fill() - else: - tOut.Fill() - -tOutNames.Write() -tOut.Write() -fOut.Close() - diff --git a/TauTagAndProbe/test/convertTreeForFitting2018.py b/TauTagAndProbe/test/convertTreeForFitting2018.py deleted file mode 100644 index cbf83df3efa..00000000000 --- a/TauTagAndProbe/test/convertTreeForFitting2018.py +++ /dev/null @@ -1,278 +0,0 @@ -from ROOT import * -import numpy as n - -MC = True -DYJets = True # False means WJet enriched cuts will be used, True means cuts for DYJet enriched samples will be used - -Sample2017 = False -Sample2018 = True - -if MC: - saveOnlyOS = True # True; save only OS, False; save both and store weight for bkg sub - if DYJets: tauGenMatching = True - if not DYJets: tauGenMatching = False - excludeLumiSections = False - print "==> OS events are stored and tau gen matching is applied for MC samples! <==" -else: - saveOnlyOS = False # True; save only OS, False; save both and store weight for bkg sub - tauGenMatching = False - excludeLumiSections = False - print "==> SS events are stored as weights and applied to suppress the bkg for Data samples! <==" - -disabledPScolumns = False # True; to remove the disabled columns, False; to consider all columns - -# the hadd of all the output ntuples -path = "/eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples2018/190110/" - -if MC: - if DYJets: - fname = path + "Ntuple_SingleMuon_RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1_190121_PUreweight1000MCbin.root" - if not DYJets: - fname = path + "NTuple_0WJets_12Apr2018_12062018_PU_1000binMC.root" -else: - #fname = path + "Ntuple_SingleMuon_Run2018ABCDReReco17SepPromptRecoD_190121.root" - #fname = path + "Ntuple_SingleMuon_Run2018D-PromptReco-v2_190121.root" - fname = path + "Ntuple_SingleMuon_Run2018C-17Sep2018-v1_190121.root" - #fname = "./NTuple_SingleMu_Data_2018_filterTest_Run201.root" - - - -#pt = [20, 26, 30, 34] -pt = [20, 26, 30, 32, 34] -if(Sample2017): - numberOfHLTTriggers = 23 - numberOfHLTTriggersForFit = 26 -if(Sample2018): - numberOfHLTTriggers = 27 - numberOfHLTTriggersForFit = 40 - - - -####################################################### -fIn = TFile.Open(fname) -tIn = fIn.Get('Ntuplizer/TagAndProbe') -tTriggerNames = fIn.Get("Ntuplizer/triggerNames") -if MC: - suppressionType = "OStauGenMatched" -else: - suppressionType = "SSsubtraction" -if DYJets: - outname = fname.replace ('.root', '_' + suppressionType + '_VVLooseWP2017v2_forFit_260419_test.root') -else: - outname = fname.replace ('.root', '_' + suppressionType + '_WjetEnriched_MediumWP2017v2_forFit.root') -fOut = TFile (outname, 'recreate') -tOut = tIn.CloneTree(0) -tOutNames = tTriggerNames.CloneTree(-1) # copy all - -briso = [n.zeros(1, dtype=int) for x in range (0, len(pt))] -brnoiso = [n.zeros(1, dtype=int) for x in range (0, len(pt))] -bkgSubW = n.zeros(1, dtype=float) -bkgSubANDpuW = n.zeros(1, dtype=float) - -hltPathTriggered_OS = [n.zeros(1, dtype=int) for x in range (0, numberOfHLTTriggersForFit+1)] - -for i in range (0, len(pt)): - name = ("hasL1_" + str(pt[i])) - tOut.Branch(name, brnoiso[i], name+"/I") - name += "_iso" - tOut.Branch(name, briso[i], name+"/I") - - 
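# A minimal, self-contained sketch (not from the deleted script) of the branch-binding
# pattern used for the hasL1_<pt>/_iso flags above: a one-element numpy array is
# registered once via TTree.Branch with a "<name>/I" leaflist, its [0] element is
# updated for each event, and TTree.Fill picks up the current value. File, tree and
# branch names below are illustrative; dtype int32 is chosen to match the 32-bit "/I" leaf.
import numpy as n
import ROOT

fout = ROOT.TFile("branch_binding_demo.root", "recreate")
tree = ROOT.TTree("demo", "demo")
flag = n.zeros(1, dtype=n.int32)              # one-element buffer shared with the tree
tree.Branch("hasL1_32", flag, "hasL1_32/I")
for l1_pt in (20.0, 35.0, 40.0):              # toy events
    flag[0] = 1 if l1_pt > 32 else 0
    tree.Fill()
fout.Write()
fout.Close()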
-for i in range (0, numberOfHLTTriggersForFit): - tTriggerNames.GetEntry(i) - if(i < numberOfHLTTriggers): - name = ("hasHLTPath_" + str(i)) - elif(i==numberOfHLTTriggers): - name = ("hasHLTetauPath_14HPS")# _IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30") - elif(i==numberOfHLTTriggers+1): - name = ("hasHLTmutauPath_14HPS") #_IsoMu20_LooseChargedIsoPFTau27") - elif(i==numberOfHLTTriggers+2): - name = ("hasHLTditauPath_15or20HPS")#_TightTau35TightIDor_MediumTau35TightIDplusHLTTau40or_TightTau35plusHLTTau40") - elif(i==numberOfHLTTriggers+3): #HLT_15 for MC and HLT_20 for data - name = ("hasHLTetauPath_8noHPS")# _IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30") - elif(i==numberOfHLTTriggers+4): - name = ("hasHLTmutauPath_8noHPS") #_IsoMu20_LooseChargedIsoPFTau27") - elif(i==numberOfHLTTriggers+5): - name = ("hasHLTditauPath_4or5or6noHPS")#_TightTau35TightIDor_MediumTau35TightIDplusHLTTau40or_TightTau35plusHLTTau40") - elif(i==numberOfHLTTriggers+6): - name = ("hasHLTetauPath_8and14") - elif(i==numberOfHLTTriggers+7): - name = ("hasHLTmutauPath_8and14") - elif(i==numberOfHLTTriggers+8): - name = ("hasHLTditauPath_4or5or6and15or20") - tOut.Branch(name, hltPathTriggered_OS[i], name+"/I") - - #tOut.Branch(name, hltPathTriggered_OS[i], name+"/I") - -#tOut.Branch("isoHLT", hltPathTriggered_OS[6], name+"/I") - -tOut.Branch("bkgSubW", bkgSubW, "bkgSubW/D") -tOut.Branch("bkgSubANDpuW", bkgSubANDpuW, "bkgSubANDpuW/D") - -nentries = tIn.GetEntries() -for ev in range (0, nentries): - tIn.GetEntry(ev) - if (ev%10000 == 0) : print ev, "/", nentries - - if abs(tIn.tauEta) > 2.1: - continue - - if saveOnlyOS and not tIn.isOS: - continue - - for i in range (0, len(pt)): - briso[i][0] = 0 - brnoiso[i][0] = 0 - - for i in range (0, numberOfHLTTriggersForFit): - hltPathTriggered_OS[i][0] = 0 - - L1iso = True if tIn.l1tIso == 1 else False - L1pt = tIn.l1tPt - for i in range(0, len(pt)): - # print L1pt, pt[i] - # - if L1pt > pt[i]: - brnoiso[i][0] = 1 - # print "SUCCESS!! ", brnoiso[i] - if L1iso: - briso[i][0] = 1 - - triggerBits = tIn.tauTriggerBits - HLTpt = tIn.hltPt - RunNumber = tIn.RunNumber - lumi = tIn.lumi - - for bitIndex in range(0, numberOfHLTTriggers): - import itertools as it -# if bitIndex in it.chain(range(3, 6), range(20, 23)): # di-tau paths for non-HPS and HPS ones - if (bitIndex==20): - if ((triggerBits >> bitIndex) & 1) == 1: - hltPathTriggered_OS[bitIndex][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - else: - if ((triggerBits >> bitIndex) & 1) == 1: - hltPathTriggered_OS[bitIndex][0] = 1 - else: - hltPathTriggered_OS[bitIndex][0] = 0 - - if(bitIndex==14): #mutau - if (((triggerBits >> bitIndex) & 1) == 1 and L1pt>=26 and L1iso>0 and HLTpt[bitIndex]>=30): - hltPathTriggered_OS[numberOfHLTTriggers][0] = 1 # this is the path for etau trigger. So (L1iso) should be applied here! - else: - hltPathTriggered_OS[numberOfHLTTriggers][0] = 0 - - if ((triggerBits >> bitIndex) & 1) == 1: - hltPathTriggered_OS[numberOfHLTTriggers+1][0] = 1 # this is the path for mutau trigger. So no extra requirement is needed like: L1pt and L1iso and HLTpt - else: - hltPathTriggered_OS[numberOfHLTTriggers+1][0] = 0 - - if(bitIndex==8): #mutau - if (((triggerBits >> bitIndex) & 1) == 1 and L1pt>=26 and L1iso>0 and HLTpt[bitIndex]>=30): - hltPathTriggered_OS[numberOfHLTTriggers+3][0] = 1 # this is the path for etau trigger. So (L1iso) should be applied here! 
- else: - hltPathTriggered_OS[numberOfHLTTriggers+3][0] = 0 - - if ((triggerBits >> bitIndex) & 1) == 1: - hltPathTriggered_OS[numberOfHLTTriggers+4][0] = 1 # this is the path for mutau trigger. So no extra requirement is needed like: L1pt and L1iso and HLTpt - else: - hltPathTriggered_OS[numberOfHLTTriggers+4][0] = 0 - - # ditau - if (((((triggerBits >> 4) & 1) == 1 and HLTpt[4]>=40) or (((triggerBits >> 5) & 1) == 1 and HLTpt[5]>=40) or (((triggerBits >> 6) & 1) == 1))): - hltPathTriggered_OS[numberOfHLTTriggers+5][0] = 1 # this is the path for di-tau trigger. HLTpt cut is required to have the same threshold on tau + L1Pt is needed due to L1 matching differences between MC and Data - else: - hltPathTriggered_OS[numberOfHLTTriggers+5][0] = 0 - if(MC): #ditau - if (((triggerBits >> 15) & 1) == 1): - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 1 # this is the path for ditau trigger for MC samples - else: - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 0 - - elif(not MC): #ditau - if (((triggerBits >> 20) & 1) == 1): - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 1 # this is the path for ditau trigger for data - else: - hltPathTriggered_OS[numberOfHLTTriggers+2][0] = 0 - - if(MC): - if (((triggerBits >> 14) & 1) == 1 and L1pt>=26 and L1iso>0 and HLTpt[14]>=30): - hltPathTriggered_OS[numberOfHLTTriggers+6][0] = 1 # this is the path for etau trigger. So (L1iso) should be applied here! - else: - hltPathTriggered_OS[numberOfHLTTriggers+6][0] = 0 - - if ((triggerBits >> 14) & 1) == 1: - hltPathTriggered_OS[numberOfHLTTriggers+7][0] = 1 # this is the path for mutau trigger. So no extra requirement is needed like: L1pt and L1iso and HLTpt - else: - hltPathTriggered_OS[numberOfHLTTriggers+7][0] = 0 - - if (((triggerBits >> 15) & 1) == 1): - hltPathTriggered_OS[numberOfHLTTriggers+8][0] = 1 - else: - hltPathTriggered_OS[numberOfHLTTriggers+8][0] = 0 - else: - if(RunNumber < 317509): - if (((triggerBits >> 8) & 1) == 1 and L1pt>=26 and L1iso>0 and HLTpt[8]>=30): - hltPathTriggered_OS[numberOfHLTTriggers+6][0] = 1 # this is the path for etau trigger. So (L1iso) should be applied here! - else: - hltPathTriggered_OS[numberOfHLTTriggers+6][0] = 0 - - if ((triggerBits >> 8) & 1) == 1: - hltPathTriggered_OS[numberOfHLTTriggers+7][0] = 1 # this is the path for mutau trigger. So no extra requirement is needed like: L1pt and L1iso and HLTpt - else: - hltPathTriggered_OS[numberOfHLTTriggers+7][0] = 0 - - if (((((triggerBits >> 4) & 1) == 1 and HLTpt[4]>=40) or (((triggerBits >> 5) & 1) == 1 and HLTpt[5]>=40) or (((triggerBits >> 6) & 1) == 1))): - # this is the path for di-tau trigger. HLTpt cut is required to have the same threshold on tau + L1Pt is needed due to L1 matching differences between MC and Data - hltPathTriggered_OS[numberOfHLTTriggers+8][0] = 1 - else: - hltPathTriggered_OS[numberOfHLTTriggers+8][0] = 0 - else: - - if (((triggerBits >> 14) & 1) == 1 and L1pt>=26 and L1iso>0 and HLTpt[14]>=30): - hltPathTriggered_OS[numberOfHLTTriggers+6][0] = 1 # this is the path for etau trigger. So (L1iso) should be applied here! - else: - hltPathTriggered_OS[numberOfHLTTriggers+6][0] = 0 - - if ((triggerBits >> 14) & 1) == 1: - hltPathTriggered_OS[numberOfHLTTriggers+7][0] = 1 # this is the path for mutau trigger. 
So no extra requirement is needed like: L1pt and L1iso and HLTpt - else: - hltPathTriggered_OS[numberOfHLTTriggers+7][0] = 0 - - if (((triggerBits >> 20) & 1) == 1): - hltPathTriggered_OS[numberOfHLTTriggers+8][0] = 1 - else: - hltPathTriggered_OS[numberOfHLTTriggers+8][0] = 0 - - - bkgSubW[0] = 1. if tIn.isOS else -1. - - if not "Run201" in fname: - puweight = tIn.puweight - else: - puweight = 1 - - bkgSubANDpuW[0] = bkgSubW[0]*puweight - - if(tIn.byVVLooseIsolationMVArun2017v2DBoldDMwLT2017 > 0.5): - #Mass cuts, mt and mvis for DY Jets - if DYJets: - if(tIn.mT < 30 and tIn.mVis >40 and tIn.mVis < 80): - if(tauGenMatching): #for tau gen matching - if(tIn.tau_genindex > 0): - tOut.Fill() - else: - tOut.Fill() - #High mT requirement for WJets - elif not DYJets: - if(tIn.mT > 30): - if(tauGenMatching): #for tau gen matching - if(tIn.tau_genindex > 0): - tOut.Fill() - else: - tOut.Fill() - -tOutNames.Write() -tOut.Write() -fOut.Close() diff --git a/TauTagAndProbe/test/crab3AutoSubmitConfigForMultipleFiles.py b/TauTagAndProbe/test/crab3AutoSubmitConfigForMultipleFiles.py deleted file mode 100644 index 111e4dae6d5..00000000000 --- a/TauTagAndProbe/test/crab3AutoSubmitConfigForMultipleFiles.py +++ /dev/null @@ -1,111 +0,0 @@ -# TEMPLATE used for automatic script submission of multiple datasets - -from WMCore.Configuration import Configuration -config = Configuration() -from CRABClient.UserUtilities import getUsernameFromSiteDB -from multiprocessing import Process -from CRABAPI.RawCommand import crabCommand - - - -import datetime -today=datetime.date.today().strftime("%Y-%m-%d") -date=today - -def submit(config): - crabCommand('submit', config = config) - #except HTTPException as hte: - # print "Failed submitting task: %s" % (hte.headers) - #except ClientException as cle: - # print "Failed submitting task: %s" % (cle) - - -config.section_("General") - -config.General.workArea = 'DefaultCrab3Area' - -config.section_("JobType") -config.JobType.pluginName = 'Analysis' -config.JobType.disableAutomaticOutputCollection = False - -config.section_("Data") -config.Data.inputDBS = 'global' -config.Data.splitting = 'Automatic' #EventAwareLumiBased' -#config.Data.unitsPerJob = 180 #number of events per jobs -config.Data.totalUnits = -1 #number of event -config.Data.outLFNDirBase = '/store/user/%s/trigger/TagAndProbeTrees/%s'%(getUsernameFromSiteDB(), date) -config.Data.publication = False -config.Data.allowNonValidInputDataset = True - -config.section_("Site") -config.Site.storageSite = 'T2_DE_RWTH' - - -samples2016New = ["/SingleMuon/Run2016B-17Jul2018_ver2-v1/MINIAOD", "/SingleMuon/Run2016C-17Jul2018-v1/MINIAOD", "/SingleMuon/Run2016D-17Jul2018-v1/MINIAOD", "/SingleMuon/Run2016E-17Jul2018-v1/MINIAOD", "/SingleMuon/Run2016F-17Jul2018-v1/MINIAOD", "/SingleMuon/Run2016G-17Jul2018-v1/MINIAOD", "/SingleMuon/Run2016H-17Jul2018-v1/MINIAOD"] -samples2017 = ["/SingleMuon/Run2017B-31Mar2018-v1/MINIAOD","/SingleMuon/Run2017C-31Mar2018-v1/MINIAOD","/SingleMuon/Run2017D-31Mar2018-v1/MINIAOD","/SingleMuon/Run2017E-31Mar2018-v1/MINIAOD","/SingleMuon/Run2017F-31Mar2018-v1/MINIAOD"] -sampleReReco2018 = ["/SingleMuon/Run2018A-17Sep2018-v2/MINIAOD","/SingleMuon/Run2018B-17Sep2018-v1/MINIAOD","/SingleMuon/Run2018C-17Sep2018-v1/MINIAOD"] -sample2018 = ["/SingleMuon/Run2018D-PromptReco-v2/MINIAOD"] # different global tags are used for promptreco and rereco, so they need to be run separately - -samples2016MC = 
["/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3_ext1-v2/MINIAODSIM", "/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3_ext2-v2/MINIAODSIM"] -samples2017MC = ["/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017RECOSIMstep_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM","/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017RECOSIMstep_12Apr2018_94X_mc2017_realistic_v14_ext1-v1/MINIAODSIM","/WJetsToLNu_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14_ext1-v2/MINIAODSIM"] -samples2018MC = ['/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1/MINIAODSIM'] - - -# ***** Choose which samples do you want to submit!***** -samples = "" -#samples = samples2016MC - -if (samples == ""): - print "====================================================================" - print " Please choose which samples do you want to submit within the code! " - print "====================================================================" - - -for index, sample in enumerate(samples): - - print "ampleS", sample - if("Run201" in sample): - samplenickname = (sample.split("/",1)[-1]).replace("/","_") - config.General.requestName = samplenickname[:100] + "_" + date - print "here", samplenickname[:100] + "_" + date - elif("Jets" in sample): - samplenickname = (sample.split("/",1)[-1]).replace("/","_") - config.General.requestName = samplenickname[:80] + "_" + date - print "here", samplenickname[:100] + "_" + date - # pSet files are given here depending on the 2016 or 2017 samples - if "2016" in sample or "Summer16" in sample: - config.JobType.psetName = 'test_SingleMu2016_TandP.py' - elif "2017" in sample or "Fall17" in sample: - config.JobType.psetName = 'test_SingleMu297050_TandP.py' - elif "2018" in sample or "Autumn18" in sample: - config.JobType.psetName = 'test_SingleMu2018_TandP.py' - - #config.JobType.pyCfgParams = ['outputfilename=%s.root'%(samplenickname)] - #config.JobType.outputFiles = ['%s.root'%(samplenickname)] - - config.Data.inputDataset = sample - config.Data.outputDatasetTag = 'TagAndProbe_SingleMu_' + sample.split("/")[2] - - # lumiMasks for Data samples - if "Run2016" in sample: - config.Data.lumiMask = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/ReReco/Final/Cert_271036-284044_13TeV_23Sep2016ReReco_Collisions16_JSON.txt' - elif "Run2017" in sample: - config.Data.lumiMask = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions17/13TeV/Final/Cert_294927-306462_13TeV_PromptReco_Collisions17_JSON.txt' - elif "Run2018" in sample: - config.Data.lumiMask = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/PromptReco/Cert_314472-325175_13TeV_PromptReco_Collisions18_JSON.txt' - - # RunRange needs to be given if not all run of the given era are not included in the JSON file - #if "Run2017F" in sample: - # config.Data.runRange = '305040-306462' - - - print "=========================================================================" - print "The sample[",index,"]:" , sample, " is being submitted to the crab" - print "=========================================================================" - - p = Process(target=submit, args=(config,)) - p.start() - p.join() - - - diff --git a/TauTagAndProbe/test/crab3_config.py b/TauTagAndProbe/test/crab3_config.py deleted file mode 
100644 index d7cf2487a44..00000000000 --- a/TauTagAndProbe/test/crab3_config.py +++ /dev/null @@ -1,29 +0,0 @@ -# TEMPLATE used for automatic script submission of multiple datasets - -from WMCore.Configuration import Configuration -config = Configuration() - -config.section_("General") -config.General.requestName = 'TagAndProbe_SingleMu297050' -config.General.workArea = 'DefaultCrab3Area' - -config.section_("JobType") -config.JobType.pluginName = 'Analysis' -config.JobType.psetName = 'test_SingleMu297050.py' - -config.section_("Data") -config.Data.inputDataset = '/SingleMuon/Run2017B-PromptReco-v1/MINIAOD' -config.Data.inputDBS = 'global' -config.Data.splitting = 'EventAwareLumiBased' -config.Data.unitsPerJob = 10000#number of events per jobs -config.Data.totalUnits = -1 #number of event -config.Data.outLFNDirBase = '/store/user/tstreble/TagAndProbeTrees' -config.Data.publication = False -config.Data.outputDatasetTag = 'TagAndProbe_SingleMu297050' -#config.Data.lumiMask = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/Cert_271036-275125_13TeV_PromptReco_Collisions16_JSON.txt' -# json with 3.99/fb -config.Data.runRange = '297050' - -config.section_("Site") -config.Site.storageSite = 'T2_FR_GRIF_LLR' - diff --git a/TauTagAndProbe/test/das_client.py b/TauTagAndProbe/test/das_client.py deleted file mode 100755 index d66df6f03c0..00000000000 --- a/TauTagAndProbe/test/das_client.py +++ /dev/null @@ -1,551 +0,0 @@ -#!/usr/bin/env python -#-*- coding: utf-8 -*- -#pylint: disable=C0301,C0103,R0914,R0903 - -""" -DAS command line tool -""" -from __future__ import print_function -__author__ = "Valentin Kuznetsov" - -# system modules -import os -import sys -import pwd -if sys.version_info < (2, 6): - raise Exception("DAS requires python 2.6 or greater") - -DAS_CLIENT = 'das-client/1.1::python/%s.%s' % sys.version_info[:2] - -import os -import re -import ssl -import time -import json -import urllib -import urllib2 -import httplib -import cookielib -from optparse import OptionParser -from math import log -from types import GeneratorType - -# define exit codes according to Linux sysexists.h -EX_OK = 0 # successful termination -EX__BASE = 64 # base value for error messages -EX_USAGE = 64 # command line usage error -EX_DATAERR = 65 # data format error -EX_NOINPUT = 66 # cannot open input -EX_NOUSER = 67 # addressee unknown -EX_NOHOST = 68 # host name unknown -EX_UNAVAILABLE = 69 # service unavailable -EX_SOFTWARE = 70 # internal software error -EX_OSERR = 71 # system error (e.g., can't fork) -EX_OSFILE = 72 # critical OS file missing -EX_CANTCREAT = 73 # can't create (user) output file -EX_IOERR = 74 # input/output error -EX_TEMPFAIL = 75 # temp failure; user is invited to retry -EX_PROTOCOL = 76 # remote error in protocol -EX_NOPERM = 77 # permission denied -EX_CONFIG = 78 # configuration error - -class HTTPSClientAuthHandler(urllib2.HTTPSHandler): - """ - Simple HTTPS client authentication class based on provided - key/ca information - """ - def __init__(self, key=None, cert=None, capath=None, level=0): - if level > 1: - urllib2.HTTPSHandler.__init__(self, debuglevel=1) - else: - urllib2.HTTPSHandler.__init__(self) - self.key = key - self.cert = cert - self.capath = capath - - def https_open(self, req): - """Open request method""" - #Rather than pass in a reference to a connection class, we pass in - # a reference to a function which, for all intents and purposes, - # will behave as a constructor - return self.do_open(self.get_connection, req) - - def get_connection(self, host, 
timeout=300): - """Connection method""" - if self.key and self.cert and not self.capath: - return httplib.HTTPSConnection(host, key_file=self.key, - cert_file=self.cert) - elif self.cert and self.capath: - context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) - context.load_verify_locations(capath=self.capath) - context.load_cert_chain(self.cert) - return httplib.HTTPSConnection(host, context=context) - return httplib.HTTPSConnection(host) - -def x509(): - "Helper function to get x509 either from env or tmp file" - proxy = os.environ.get('X509_USER_PROXY', '') - if not proxy: - proxy = '/tmp/x509up_u%s' % pwd.getpwuid( os.getuid() ).pw_uid - if not os.path.isfile(proxy): - return '' - return proxy - -def check_glidein(): - "Check glideine environment and exit if it is set" - glidein = os.environ.get('GLIDEIN_CMSSite', '') - if glidein: - msg = "ERROR: das_client is running from GLIDEIN environment, it is prohibited" - print(msg) - sys.exit(EX__BASE) - -def check_auth(key): - "Check if user runs das_client with key/cert and warn users to switch" - if not key: - msg = "WARNING: das_client is running without user credentials/X509 proxy, create proxy via 'voms-proxy-init -voms cms -rfc'" - print(msg, file=sys.stderr) - -class DASOptionParser: - """ - DAS cache client option parser - """ - def __init__(self): - usage = "Usage: %prog [options]\n" - usage += "For more help please visit https://cmsweb.cern.ch/das/faq" - self.parser = OptionParser(usage=usage) - self.parser.add_option("-v", "--verbose", action="store", - type="int", default=0, dest="verbose", - help="verbose output") - self.parser.add_option("--query", action="store", type="string", - default=False, dest="query", - help="specify query for your request") - msg = "host name of DAS cache server, default is https://cmsweb.cern.ch" - self.parser.add_option("--host", action="store", type="string", - default='https://cmsweb.cern.ch', dest="host", help=msg) - msg = "start index for returned result set, aka pagination," - msg += " use w/ limit (default is 0)" - self.parser.add_option("--idx", action="store", type="int", - default=0, dest="idx", help=msg) - msg = "number of returned results (default is 10)," - msg += " use --limit=0 to show all results" - self.parser.add_option("--limit", action="store", type="int", - default=10, dest="limit", help=msg) - msg = 'specify return data format (json or plain), default plain.' 
- self.parser.add_option("--format", action="store", type="string", - default="plain", dest="format", help=msg) - msg = 'query waiting threshold in sec, default is 5 minutes' - self.parser.add_option("--threshold", action="store", type="int", - default=300, dest="threshold", help=msg) - msg = 'specify private key file name, default $X509_USER_PROXY' - self.parser.add_option("--key", action="store", type="string", - default=x509(), dest="ckey", help=msg) - msg = 'specify private certificate file name, default $X509_USER_PROXY' - self.parser.add_option("--cert", action="store", type="string", - default=x509(), dest="cert", help=msg) - msg = 'specify CA path, default $X509_CERT_DIR' - self.parser.add_option("--capath", action="store", type="string", - default=os.environ.get("X509_CERT_DIR", ""), - dest="capath", help=msg) - msg = 'specify number of retries upon busy DAS server message' - self.parser.add_option("--retry", action="store", type="string", - default=0, dest="retry", help=msg) - msg = 'show DAS headers in JSON format' - msg += ' (obsolete, keep for backward compatibility)' - self.parser.add_option("--das-headers", action="store_true", - default=False, dest="das_headers", help=msg) - msg = 'specify power base for size_format, default is 10 (can be 2)' - self.parser.add_option("--base", action="store", type="int", - default=0, dest="base", help=msg) - - msg = 'a file which contains a cached json dictionary for query -> files mapping' - self.parser.add_option("--cache", action="store", type="string", - default=None, dest="cache", help=msg) - - msg = 'List DAS key/attributes, use "all" or specific DAS key value, e.g. site' - self.parser.add_option("--list-attributes", action="store", type="string", - default="", dest="keys_attrs", help=msg) - def get_opt(self): - """ - Returns parse list of options - """ - return self.parser.parse_args() - -def convert_time(val): - "Convert given timestamp into human readable format" - if isinstance(val, int) or isinstance(val, float): - return time.strftime('%d/%b/%Y_%H:%M:%S_GMT', time.gmtime(val)) - return val - -def size_format(uinput, ibase=0): - """ - Format file size utility, it converts file size into KB, MB, GB, TB, PB units - """ - if not ibase: - return uinput - try: - num = float(uinput) - except Exception as _exc: - return uinput - if ibase == 2.: # power of 2 - base = 1024. - xlist = ['', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] - else: # default base is 10 - base = 1000. - xlist = ['', 'KB', 'MB', 'GB', 'TB', 'PB'] - for xxx in xlist: - if num < base: - return "%3.1f%s" % (num, xxx) - num /= base - -def unique_filter(rows): - """ - Unique filter drop duplicate rows. 
- """ - old_row = {} - row = None - for row in rows: - row_data = dict(row) - try: - del row_data['_id'] - del row_data['das'] - del row_data['das_id'] - del row_data['cache_id'] - except: - pass - old_data = dict(old_row) - try: - del old_data['_id'] - del old_data['das'] - del old_data['das_id'] - del old_data['cache_id'] - except: - pass - if row_data == old_data: - continue - if old_row: - yield old_row - old_row = row - yield row - -def extract_value(row, key): - """Generator which extracts row[key] value""" - if isinstance(row, dict) and key in row: - if key == 'creation_time': - row = convert_time(row[key]) - elif key == 'size': - row = size_format(row[key], base) - else: - row = row[key] - yield row - if isinstance(row, list) or isinstance(row, GeneratorType): - for item in row: - for vvv in extract_value(item, key): - yield vvv - -def get_value(data, filters, base=10): - """Filter data from a row for given list of filters""" - for ftr in filters: - if ftr.find('>') != -1 or ftr.find('<') != -1 or ftr.find('=') != -1: - continue - row = dict(data) - values = [] - keys = ftr.split('.') - for key in keys: - val = [v for v in extract_value(row, key)] - if key == keys[-1]: # we collect all values at last key - values += [json.dumps(i) for i in val] - else: - row = val - if len(values) == 1: - yield values[0] - else: - yield values - -def fullpath(path): - "Expand path to full path" - if path and path[0] == '~': - path = path.replace('~', '') - path = path[1:] if path[0] == '/' else path - path = os.path.join(os.environ['HOME'], path) - return path - -def get_data(host, query, idx, limit, debug, threshold=300, ckey=None, - cert=None, capath=None, das_headers=True): - """Contact DAS server and retrieve data for given DAS query""" - params = {'input':query, 'idx':idx, 'limit':limit} - path = '/das/cache' - pat = re.compile('http[s]{0,1}://') - if not pat.match(host): - msg = 'Invalid hostname: %s' % host - raise Exception(msg) - url = host + path - client = '%s (%s)' % (DAS_CLIENT, os.environ.get('USER', '')) - headers = {"Accept": "application/json", "User-Agent": client} - encoded_data = urllib.urlencode(params, doseq=True) - url += '?%s' % encoded_data - req = urllib2.Request(url=url, headers=headers) - if ckey and cert: - ckey = fullpath(ckey) - cert = fullpath(cert) - http_hdlr = HTTPSClientAuthHandler(ckey, cert, capath, debug) - elif cert and capath: - cert = fullpath(cert) - http_hdlr = HTTPSClientAuthHandler(ckey, cert, capath, debug) - else: - http_hdlr = urllib2.HTTPHandler(debuglevel=debug) - proxy_handler = urllib2.ProxyHandler({}) - cookie_jar = cookielib.CookieJar() - cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar) - opener = urllib2.build_opener(http_hdlr, proxy_handler, cookie_handler) - fdesc = opener.open(req) - data = fdesc.read() - fdesc.close() - - pat = re.compile(r'^[a-z0-9]{32}') - if data and isinstance(data, str) and pat.match(data) and len(data) == 32: - pid = data - else: - pid = None - iwtime = 2 # initial waiting time in seconds - wtime = 20 # final waiting time in seconds - sleep = iwtime - time0 = time.time() - while pid: - params.update({'pid':data}) - encoded_data = urllib.urlencode(params, doseq=True) - url = host + path + '?%s' % encoded_data - req = urllib2.Request(url=url, headers=headers) - try: - fdesc = opener.open(req) - data = fdesc.read() - fdesc.close() - except urllib2.HTTPError as err: - return {"status":"fail", "reason":str(err)} - if data and isinstance(data, str) and pat.match(data) and len(data) == 32: - pid = data - else: - pid 
= None - time.sleep(sleep) - if sleep < wtime: - sleep *= 2 - elif sleep == wtime: - sleep = iwtime # start new cycle - else: - sleep = wtime - if (time.time()-time0) > threshold: - reason = "client timeout after %s sec" % int(time.time()-time0) - return {"status":"fail", "reason":reason} - jsondict = json.loads(data) - return jsondict - -def prim_value(row): - """Extract primary key value from DAS record""" - prim_key = row['das']['primary_key'] - if prim_key == 'summary': - return row.get(prim_key, None) - key, att = prim_key.split('.') - if isinstance(row[key], list): - for item in row[key]: - if att in item: - return item[att] - else: - if key in row: - if att in row[key]: - return row[key][att] - -def print_summary(rec): - "Print summary record information on stdout" - if 'summary' not in rec: - msg = 'Summary information is not found in record:\n', rec - raise Exception(msg) - for row in rec['summary']: - keys = [k for k in row.keys()] - maxlen = max([len(k) for k in keys]) - for key, val in row.items(): - pkey = '%s%s' % (key, ' '*(maxlen-len(key))) - print('%s: %s' % (pkey, val)) - print() - -def print_from_cache(cache, query): - "print the list of files reading it from cache" - data = open(cache).read() - jsondict = json.loads(data) - if query in jsondict: - print("\n".join(jsondict[query])) - exit(0) - exit(1) - -def keys_attrs(lkey, oformat, host, ckey, cert, debug=0): - "Contact host for list of key/attributes pairs" - url = '%s/das/keys?view=json' % host - headers = {"Accept": "application/json", "User-Agent": DAS_CLIENT} - req = urllib2.Request(url=url, headers=headers) - if ckey and cert: - ckey = fullpath(ckey) - cert = fullpath(cert) - http_hdlr = HTTPSClientAuthHandler(ckey, cert, debug) - else: - http_hdlr = urllib2.HTTPHandler(debuglevel=debug) - proxy_handler = urllib2.ProxyHandler({}) - cookie_jar = cookielib.CookieJar() - cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar) - opener = urllib2.build_opener(http_hdlr, proxy_handler, cookie_handler) - fdesc = opener.open(req) - data = json.load(fdesc) - fdesc.close() - if oformat.lower() == 'json': - if lkey == 'all': - print(json.dumps(data)) - else: - print(json.dumps({lkey:data[lkey]})) - return - for key, vdict in data.items(): - if lkey == 'all': - pass - elif lkey != key: - continue - print() - print("DAS key:", key) - for attr, examples in vdict.items(): - prefix = ' ' - print('%s%s' % (prefix, attr)) - for item in examples: - print('%s%s%s' % (prefix, prefix, item)) - -def main(): - """Main function""" - optmgr = DASOptionParser() - opts, _ = optmgr.get_opt() - host = opts.host - debug = opts.verbose - query = opts.query - idx = opts.idx - limit = opts.limit - thr = opts.threshold - ckey = opts.ckey - cert = opts.cert - capath = opts.capath - base = opts.base - check_glidein() - check_auth(ckey) - if opts.keys_attrs: - keys_attrs(opts.keys_attrs, opts.format, host, ckey, cert, debug) - return - if not query: - print('Input query is missing') - sys.exit(EX_USAGE) - if opts.format == 'plain': - jsondict = get_data(host, query, idx, limit, debug, thr, ckey, cert, capath) - cli_msg = jsondict.get('client_message', None) - if cli_msg: - print("DAS CLIENT WARNING: %s" % cli_msg) - if 'status' not in jsondict and opts.cache: - print_from_cache(opts.cache, query) - if 'status' not in jsondict: - print('DAS record without status field:\n%s' % jsondict) - sys.exit(EX_PROTOCOL) - if jsondict["status"] != 'ok' and opts.cache: - print_from_cache(opts.cache, query) - if jsondict['status'] != 'ok': - print("status: %s, 
reason: %s" \ - % (jsondict.get('status'), jsondict.get('reason', 'N/A'))) - if opts.retry: - found = False - for attempt in xrange(1, int(opts.retry)): - interval = log(attempt)**5 - print("Retry in %5.3f sec" % interval) - time.sleep(interval) - data = get_data(host, query, idx, limit, debug, thr, ckey, cert, capath) - jsondict = json.loads(data) - if jsondict.get('status', 'fail') == 'ok': - found = True - break - else: - sys.exit(EX_TEMPFAIL) - if not found: - sys.exit(EX_TEMPFAIL) - nres = jsondict.get('nresults', 0) - if not limit: - drange = '%s' % nres - else: - drange = '%s-%s out of %s' % (idx+1, idx+limit, nres) - if opts.limit: - msg = "\nShowing %s results" % drange - msg += ", for more results use --idx/--limit options\n" - print(msg) - mongo_query = jsondict.get('mongo_query', {}) - unique = False - fdict = mongo_query.get('filters', {}) - filters = fdict.get('grep', []) - aggregators = mongo_query.get('aggregators', []) - if 'unique' in fdict.keys(): - unique = True - if filters and not aggregators: - data = jsondict['data'] - if isinstance(data, dict): - rows = [r for r in get_value(data, filters, base)] - print(' '.join(rows)) - elif isinstance(data, list): - if unique: - data = unique_filter(data) - for row in data: - rows = [r for r in get_value(row, filters, base)] - types = [type(r) for r in rows] - if len(types)>1: # mixed types print as is - print(' '.join([str(r) for r in rows])) - elif isinstance(rows[0], list): - out = set() - for item in rows: - for elem in item: - out.add(elem) - print(' '.join(out)) - else: - print(' '.join(rows)) - else: - print(json.dumps(jsondict)) - elif aggregators: - data = jsondict['data'] - if unique: - data = unique_filter(data) - for row in data: - if row['key'].find('size') != -1 and \ - row['function'] == 'sum': - val = size_format(row['result']['value'], base) - else: - val = row['result']['value'] - print('%s(%s)=%s' \ - % (row['function'], row['key'], val)) - else: - data = jsondict['data'] - if isinstance(data, list): - old = None - val = None - for row in data: - prim_key = row.get('das', {}).get('primary_key', None) - if prim_key == 'summary': - print_summary(row) - return - val = prim_value(row) - if not opts.limit: - if val != old: - print(val) - old = val - else: - print(val) - if val != old and not opts.limit: - print(val) - elif isinstance(data, dict): - print(prim_value(data)) - else: - print(data) - else: - jsondict = get_data(\ - host, query, idx, limit, debug, thr, ckey, cert, capath) - print(json.dumps(jsondict)) - -# -# main -# -if __name__ == '__main__': - main() - diff --git a/TauTagAndProbe/test/fitter/Makefile b/TauTagAndProbe/test/fitter/Makefile deleted file mode 100644 index 83730832b3b..00000000000 --- a/TauTagAndProbe/test/fitter/Makefile +++ /dev/null @@ -1,48 +0,0 @@ -#makefile - - -CC = g++ - -#UCFLAGS = -O0 -g3 -Wall -gstabs+ -UCFLAGS = -O3 -Wall -gstabs+ -std=c++0x - - -RUCFLAGS := $(shell root-config --cflags) -I${ROOFITSYS}/include/ -I./include/ -LIBS := $(shell root-config --libs) -lTreePlayer -L${ROOFITSYS}/lib/ -lRooFit -lRooFitCore ./obj/FuncCB_cdf_cpp.so -GLIBS := $(shell root-config --glibs) - -VPATH = ./src/ - -SRCPP = main.cpp\ - Utilities.cpp\ - TurnonFit.cpp\ - TurnonManager.cpp - - - - -#OBJCPP = $(SRCPP:.cpp=.o) -OBJCPP = $(patsubst %.cpp,obj/%.o,$(SRCPP)) - - -all : obj/FuncCB_cdf_cpp.so fit.exe - -obj/%.o : %.cpp - @mkdir -p obj/ - @echo compiling $* - @$(CC) -c $< $(UCFLAGS) $(RUCFLAGS) -o $@ - -fit.exe : $(OBJCPP) - @echo linking - @$(CC) $^ $(ACLIBS) $(LIBS) $(GLIBS) -o $@ - 
-obj/FuncCB_cdf_cpp.so: include/FuncCB_cdf.cpp createCBLib.sh - sh createCBLib.sh - -clean: - @rm -f obj/*.o - @rm -f fit.exe - -cleanall: clean - @rm obj/FuncCB_cdf_cpp.so - diff --git a/TauTagAndProbe/test/fitter/createCBLib.sh b/TauTagAndProbe/test/fitter/createCBLib.sh deleted file mode 100644 index 5885b6f39d3..00000000000 --- a/TauTagAndProbe/test/fitter/createCBLib.sh +++ /dev/null @@ -1,2 +0,0 @@ -cd include && root -b -q loadRooFit.C FuncCB_cdf.cpp++ -mv FuncCB_cdf_cpp.so ../obj diff --git a/TauTagAndProbe/test/fitter/include/.gitignore b/TauTagAndProbe/test/fitter/include/.gitignore deleted file mode 100644 index 7ec5db4155f..00000000000 --- a/TauTagAndProbe/test/fitter/include/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.pcm -*.d diff --git a/TauTagAndProbe/test/fitter/include/FuncCB.cpp b/TauTagAndProbe/test/fitter/include/FuncCB.cpp deleted file mode 100644 index f99f22189e5..00000000000 --- a/TauTagAndProbe/test/fitter/include/FuncCB.cpp +++ /dev/null @@ -1,123 +0,0 @@ - /***************************************************************************** - * Project: RooFit * - * * - * This code was autogenerated by RooClassFactory * - *****************************************************************************/ - - // Your description goes here... - -// #include "Riostream.h" - -#include "FuncCB.h" -//#include "RooAbsReal.h" -//#include "/opt/exp_soft/cms/slc5_ia32_gcc434/lcg/roofit/5.25.02-cms6/include/RooAbsReal.h" -//#include "/opt/exp_soft/cms/slc5_ia32_gcc434/lcg/roofit/5.25.02-cms6/include/RooAbsCategory.h" -//#include "RooAbsCategory.h" -//#include "RooMath.h" - -ClassImp(FuncCB) - - FuncCB::FuncCB(const char *name, const char *title, - RooAbsReal& _m, - RooAbsReal& _m0, - RooAbsReal& _sigma, - RooAbsReal& _alpha, - RooAbsReal& _n, - RooAbsReal& _norm, - RooAbsReal& _mturn, - RooAbsReal& _p, - RooAbsReal& _width) : - RooAbsReal(name,title), - m("m","m",this,_m), - m0("m0","m0",this,_m0), - sigma("sigma","sigma",this,_sigma), - alpha("alpha","alpha",this,_alpha), - n("n","n",this,_n), - norm("norm","norm",this,_norm), - mturn("mturn","mturn",this,_mturn), - p("p","p",this,_p), - width("width","width",this,_width) -{ -} - - -FuncCB::FuncCB(const FuncCB& other, const char* name) : - RooAbsReal(other,name), - m("m",this,other.m), - m0("m0",this,other.m0), - sigma("sigma",this,other.sigma), - alpha("alpha",this,other.alpha), - n("n",this,other.n), - norm("norm",this,other.norm), - mturn("mturn",this,other.mturn), - p("p",this,other.p), - width("width",this,other.width) -{ -} - - -Double_t FuncCB::valeur(Double_t et) -{ - m = et; - return evaluate(); -} - -Double_t FuncCB::evaluate() const -{ - const double sqrtPiOver2 = 1.2533141373; // sqrt(pi/2) - const double sqrt2 = 1.4142135624; - - Double_t sig = fabs((Double_t) sigma); - - Double_t t = (m - m0)/sig ; - - if (alpha < 0) - t = -t; - - Double_t absAlpha = fabs(alpha / sig); - Double_t a = TMath::Power(n/absAlpha,n)*exp(-0.5*absAlpha*absAlpha); - Double_t b = absAlpha - n/absAlpha; - - //cout << a << " " << b << endl; - - ////// Pour la crystal ball - // if (t <= absAlpha){ - // return norm * exp(-0.5*t*t); - // } - // else - // { - // return norm * a * TMath::Power(t-b,-n) ; - // } - - Double_t aireGauche = (1 + ApproxErf( absAlpha / sqrt2 )) * sqrtPiOver2 ; - Double_t aireDroite = ( a * 1/TMath::Power(absAlpha - b,n-1)) / (n - 1); - Double_t aire = aireGauche + aireDroite; - - //Arctan part - Double_t Linear = 0.; - if(m=mturn) Linear = 
pow(ApproxErf((m-mturn)/5.),2)*2.*(1.-p)/3.14159*TMath::ATan(3.14159/80.*width*(m-mturn))+p; - - if ( t <= absAlpha ){ - //return norm * (1 + ApproxErf( t / sqrt2 )) * sqrtPiOver2 / aire ; - return norm * (1 + ApproxErf( t / sqrt2 )) * sqrtPiOver2 / aire * Linear ; - } - else{ - //return norm * (aireGauche + a * (1/TMath::Power(t-b,n-1) - 1/TMath::Power(absAlpha - b,n-1)) / (1 - n)) / aire ; - return norm * (aireGauche + a * (1/TMath::Power(t-b,n-1) - 1/TMath::Power(absAlpha - b,n-1)) / (1 - n)) / aire * Linear ; - } - - } - - -//_____________________________________________________________________________ -Double_t FuncCB::ApproxErf(Double_t arg) const -{ - static const double erflim = 5.0; - if( arg > erflim ) - return 1.0; - if( arg < -erflim ) - return -1.0; - - return RooMath::erf(arg); -} diff --git a/TauTagAndProbe/test/fitter/include/FuncCB.h b/TauTagAndProbe/test/fitter/include/FuncCB.h deleted file mode 100644 index 7858012e18d..00000000000 --- a/TauTagAndProbe/test/fitter/include/FuncCB.h +++ /dev/null @@ -1,52 +0,0 @@ -#ifndef FUNCCB -#define FUNCCB - -#include "TMath.h" -#include - -#include "RooAbsReal.h" -#include "RooRealProxy.h" -#include "RooCategoryProxy.h" -#include "RooAbsReal.h" -#include "RooAbsCategory.h" -#include "RooMath.h" - -class FuncCB : public RooAbsReal { -public: - FuncCB() {} ; - FuncCB(const char *name, const char *title, - RooAbsReal& _m, - RooAbsReal& _m0, - RooAbsReal& _sigma, - RooAbsReal& _alpha, - RooAbsReal& _n, - RooAbsReal& _norm, - RooAbsReal& _mturn, - RooAbsReal& _p, - RooAbsReal& _width); - FuncCB(const FuncCB& other, const char* name=0) ; - - Double_t evaluate() const; - Double_t valeur(Double_t et); - - virtual TObject* clone(const char* newname) const { return new FuncCB(*this,newname); } - inline virtual ~FuncCB() { } - -protected: - Double_t ApproxErf(Double_t arg) const ; - RooRealProxy m ; - RooRealProxy m0 ; - RooRealProxy sigma ; - RooRealProxy alpha ; - RooRealProxy n ; - RooRealProxy norm ; - RooRealProxy mturn; - RooRealProxy p; - RooRealProxy width; - -private: - - ClassDef(FuncCB,1) // Your description goes here... -}; - -#endif diff --git a/TauTagAndProbe/test/fitter/include/FuncCB_cdf.cpp b/TauTagAndProbe/test/fitter/include/FuncCB_cdf.cpp deleted file mode 100644 index 142f03301ca..00000000000 --- a/TauTagAndProbe/test/fitter/include/FuncCB_cdf.cpp +++ /dev/null @@ -1,149 +0,0 @@ -1;95;0c /***************************************************************************** - * Project: RooFit * - * * - * The skeleton of this code is taken from FuncCB class * - *****************************************************************************/ - - // Your description goes here... 
- -// #include "Riostream.h" - -#include "FuncCB_cdf.h" - - -ClassImp(FuncCB_cdf) - - FuncCB_cdf::FuncCB_cdf(const char *name, const char *title, - RooAbsReal& _m, - RooAbsReal& _m0, - RooAbsReal& _sigma, - RooAbsReal& _alpha, - RooAbsReal& _n, - RooAbsReal& _norm, - RooAbsReal& _yrise) : - RooAbsReal(name,title), - m("m","m",this,_m), - m0("m0","m0",this,_m0), - sigma("sigma","sigma",this,_sigma), - alpha("alpha","alpha",this,_alpha), - n("n","n",this,_n), - norm("norm","norm",this,_norm), - yrise("yrise","yrise",this,_yrise) -{ -} - - -FuncCB_cdf::FuncCB_cdf(const FuncCB_cdf& other, const char* name) : - RooAbsReal(other,name), - m("m",this,other.m), - m0("m0",this,other.m0), - sigma("sigma",this,other.sigma), - alpha("alpha",this,other.alpha), - n("n",this,other.n), - norm("norm",this,other.norm), - yrise("yrise",this,other.yrise) -{ -} - - -Double_t FuncCB_cdf::valeur(Double_t et) -{ - m = et; - return evaluate(); -} - -Double_t FuncCB_cdf::evaluate() const -{ - if (n <= 1.){ - MATH_ERROR_MSG("crystalball_cdf","CrystalBall cdf not defined for n <=1"); - return std::numeric_limits::quiet_NaN(); - } - static const double kSqrt2 = 1.41421356237309515; // sqrt(2.) - double abs_alpha = std::abs(alpha); - double C = n/abs_alpha * 1./(n-1.) * std::exp(-alpha*alpha/2.); - double D = std::sqrt(M_PI/2.)*(1.+ RooMath::erf(abs_alpha/std::sqrt(2.))); - double totIntegral = sigma*(C+D); - - double integral = crystalball_integral(-m, alpha, n, sigma, m0); - return (alpha > 0) ? yrise -(1. - integral/totIntegral)*(norm) : yrise - (integral/totIntegral)*(norm); - } - - -//_____________________________________________________________________________ -Double_t FuncCB_cdf::crystalball_integral(Double_t m, Double_t alpha, Double_t n, Double_t sigma, Double_t m0) const - { - // compute the integral of the crystal ball function (ROOT::Math::crystalball_function) - // If alpha > 0 the integral is the right tail integral. - // If alpha < 0 is the left tail integrals which are always finite for finite x. - // parameters: - // alpha : is non equal to zero, define the # of sigma from which it becomes a power-law function (from mean-alpha*sigma) - // n > 1 : is integrer, is the power of the low tail - // add a value xmin for cases when n <=1 the integral diverges - if (sigma == 0) return 0; - if (alpha==0) - { - MATH_ERROR_MSG("crystalball_integral","CrystalBall function not defined at alpha=0"); - return 0.; - } - bool useLog = (n == 1.0); - if (n<=0) MATH_WARN_MSG("crystalball_integral","No physical meaning when n<=0"); - - double z = (m-m0)/sigma; - if (alpha < 0 ) z = -z; - - double abs_alpha = std::abs(alpha); - - //double D = *(1.+ROOT::Math::erf(abs_alpha/std::sqrt(2.))); - //double N = 1./(sigma*(C+D)); - double intgaus = 0.; - double intpow = 0.; - - const double sqrtpiover2 = std::sqrt(M_PI/2.); - const double sqrt2pi = std::sqrt( 2.*M_PI); - const double oneoversqrt2 = 1./sqrt(2.); - if (z <= -abs_alpha) - { - double A = std::pow(n/abs_alpha,n) * std::exp(-0.5 * alpha*alpha); - double B = n/abs_alpha - abs_alpha; - - if (!useLog) { - double C = (n/abs_alpha) * (1./(n-1)) * std::exp(-alpha*alpha/2.); - intpow = C - A /(n-1.) 
* std::pow(B-z,-n+1) ; - } - else { - // for n=1 the primitive of 1/x is log(x) - intpow = -A * std::log( n / abs_alpha ) + A * std::log( B -z ); - } - intgaus = sqrtpiover2*(1.+RooMath::erf(abs_alpha*oneoversqrt2)); - } - else - { - intgaus = gaussian_cdf_c(z, 1, 0); - intgaus *= sqrt2pi; - intpow = 0; - } - return sigma * (intgaus + intpow); - } - - -//_____________________________________________________________________________ -Double_t FuncCB_cdf::gaussian_cdf_c(double m, double sigma, double m0) const - { - static const double kSqrt2 = 1.41421356237309515; // sqrt(2.) - double z = (m-m0)/(sigma*kSqrt2); - if (z > 1.) return 0.5*RooMath::erfc(z); - else return 0.5*(1.-RooMath::erf(z)); - } - - -//_____________________________________________________________________________ -Double_t FuncCB_cdf::ApproxErf(Double_t arg) const -{ - static const double erflim = 5.0; - if( arg > erflim ) - return 1.0; - if( arg < -erflim ) - return -1.0; - - return RooMath::erf(arg); -} diff --git a/TauTagAndProbe/test/fitter/include/FuncCB_cdf.h b/TauTagAndProbe/test/fitter/include/FuncCB_cdf.h deleted file mode 100644 index 3b1018759d5..00000000000 --- a/TauTagAndProbe/test/fitter/include/FuncCB_cdf.h +++ /dev/null @@ -1,51 +0,0 @@ -#ifndef FUNCCB_CDF -#define FUNCCB_CDF - -#include "TMath.h" - #include "Math/Error.h" -#include - -#include "RooAbsReal.h" -#include "RooRealProxy.h" -#include "RooCategoryProxy.h" -#include "RooAbsReal.h" -#include "RooAbsCategory.h" -#include "RooMath.h" - -class FuncCB_cdf : public RooAbsReal { -public: - FuncCB_cdf() {} ; - FuncCB_cdf(const char *name, const char *title, - RooAbsReal& _m, - RooAbsReal& _m0, - RooAbsReal& _sigma, - RooAbsReal& _alpha, - RooAbsReal& _n, - RooAbsReal& _norm, - RooAbsReal& _yrise); - FuncCB_cdf(const FuncCB_cdf& other, const char* name=0) ; - - Double_t evaluate() const; - Double_t valeur(Double_t et); - - virtual TObject* clone(const char* newname) const { return new FuncCB_cdf(*this,newname); } - inline virtual ~FuncCB_cdf() { } - -protected: - Double_t ApproxErf(Double_t arg) const ; - Double_t crystalball_integral(Double_t m, Double_t alpha, Double_t n, Double_t sigma, Double_t m0) const; - Double_t gaussian_cdf_c(double m, double sigma, double m0) const; - RooRealProxy m ; - RooRealProxy m0 ; - RooRealProxy sigma ; - RooRealProxy alpha ; - RooRealProxy n ; - RooRealProxy norm ; - RooRealProxy yrise; - -private: - - ClassDef(FuncCB_cdf,1) // Your description goes here... 
-}; - -#endif diff --git a/TauTagAndProbe/test/fitter/include/FuncCB_cpp.d b/TauTagAndProbe/test/fitter/include/FuncCB_cpp.d deleted file mode 100644 index d500f3ea4ac..00000000000 --- a/TauTagAndProbe/test/fitter/include/FuncCB_cpp.d +++ /dev/null @@ -1,202 +0,0 @@ - -# DO NOT DELETE - -./FuncCB_cpp.so: FuncCB.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TMath.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/Rtypes.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RtypesCore.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RConfig.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RVersion.h -./FuncCB_cpp.so: /usr/include/features.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/DllImport.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/snprintf.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/strlcpy.h -./FuncCB_cpp.so: /usr/include/unistd.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/atomic -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/c++0x_warning.h -./FuncCB_cpp.so: /usr/include/stdio.h /usr/include/string.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/typeinfo -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/exception -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/c++config.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/os_defines.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/cpu_defines.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/atomic_lockfree_defines.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TGenericClassInfo.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/vector -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_algobase.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/functexcept.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/exception_defines.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/cpp_type_traits.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/ext/type_traits.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/ext/numeric_traits.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_pair.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/move.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/concept_check.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_iterator_base_types.h -./FuncCB_cpp.so: 
/cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_iterator_base_funcs.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/debug/assertions.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_iterator.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/ptr_traits.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/debug/debug.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/predefined_ops.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/allocator.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/c++allocator.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/ext/new_allocator.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/new -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/memoryfwd.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_construct.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/ext/alloc_traits.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_uninitialized.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_vector.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_bvector.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/range_access.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/vector.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TSchemaHelper.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/string -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stringfwd.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/char_traits.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/postypes.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/cwchar -./FuncCB_cpp.so: /usr/include/wchar.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/localefwd.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/c++locale.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/clocale -./FuncCB_cpp.so: /usr/include/locale.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/iosfwd -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/cctype -./FuncCB_cpp.so: /usr/include/ctype.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/ostream_insert.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/cxxabi_forced.h -./FuncCB_cpp.so: 
/cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_function.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/backward/binders.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/basic_string.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/ext/atomicity.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/gthr.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/gthr-default.h -./FuncCB_cpp.so: /usr/include/pthread.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/atomic_word.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/basic_string.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TIsAProxy.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TVirtualIsAProxy.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TMathBase.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/cstdlib -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/stdlib.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/cmath -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/math.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TError.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/algorithm -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/utility -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_relops.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_algo.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/algorithmfwd.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_heap.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_tempbuf.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/limits -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsReal.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsArg.h -./FuncCB_cpp.so: /usr/include/assert.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TNamed.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TObject.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RConfigure.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TStorage.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TVersionCheck.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TString.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RStringView.h -./FuncCB_cpp.so: 
/cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/experimental/string_view -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/c++14_warning.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/THashList.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TList.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TSeqCollection.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TCollection.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TIterator.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/iterator -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/ostream -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/ios -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/ios_base.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/locale_classes.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/locale_classes.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/stdexcept -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/streambuf -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/streambuf.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/basic_ios.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/locale_facets.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/cwctype -./FuncCB_cpp.so: /usr/include/wctype.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/ctype_base.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/streambuf_iterator.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/x86_64-unknown-linux-gnu/bits/ctype_inline.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/locale_facets.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/basic_ios.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/ostream.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/istream -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/istream.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stream_iterator.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TRefArray.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TProcessID.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TObjArray.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/type_traits -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooPrintable.h -./FuncCB_cpp.so: 
/cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooRefCountList.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooLinkedList.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/map -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_tree.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_map.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_multimap.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/list -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_list.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/list.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooLinkedListElem.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooHashTable.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsCache.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooLinkedListIter.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooNameReg.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/set -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_set.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_multiset.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/deque -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_deque.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/deque.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/stack -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_stack.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/iostream -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TClass.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TDictionary.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/ESTLType.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TObjString.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/unordered_set -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/ThreadLocalStorage.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooCmdArg.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooCurve.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TGraph.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TAttLine.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TAttFill.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TAttMarker.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TVectorFfwd.h 
-./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TVectorDfwd.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TFitResultPtr.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/memory -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/stl_raw_storage_iter.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/backward/auto_ptr.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooPlotable.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TMatrixDfwd.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooArgSet.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsCollection.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooArgList.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooGlobalFunc.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooRealProxy.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooArgProxy.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsProxy.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsRealLValue.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooNumber.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsLValue.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsBinning.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooCategoryProxy.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsCategory.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooCatType.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooAbsCategoryLValue.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RooMath.h -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/complex -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/sstream -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/external/gcc/6.3.0/include/c++/6.3.0/bits/sstream.tcc -./FuncCB_cpp.so: /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RVersion.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RConfig.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TClass.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TDictAttributeMap.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TInterpreter.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TROOT.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TBuffer.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TMemberInspector.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TError.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/RtypesImp.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TIsAProxy.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TFileMergeInfo.h /cvmfs/cms.cern.ch/slc6_amd64_gcc630/lcg/root/6.10.08/include/TCollectionProxyInfo.h 
/cvmfs/cms.cern.ch/slc6_amd64_gcc630/cms/cmssw/CMSSW_9_4_0/external/slc6_amd64_gcc630/bin/rootcling -FuncCB_cpp__ROOTBUILDVERSION= 6.10/09 diff --git a/TauTagAndProbe/test/fitter/include/TurnonFit.h b/TauTagAndProbe/test/fitter/include/TurnonFit.h deleted file mode 100644 index 62839797839..00000000000 --- a/TauTagAndProbe/test/fitter/include/TurnonFit.h +++ /dev/null @@ -1,117 +0,0 @@ -/** - * @file TurnonFit.h - * @brief - * - * - * @author Jean-Baptiste Sauvan - * - * @date 05/10/2014 - * - * @internal - * Created : 05/10/2014 - * Last update : 05/10/2014 21:00:46 - * by : JB Sauvan - * - * ===================================================================================== - */ - - - -#ifndef TURNONFIT_H -#define TURNONFIT_H - - -#include -#include - -#include "TFile.h" - -#include "RooPlot.h" -#include "RooHist.h" -#include "RooCurve.h" -#include "RooFitResult.h" -#include "RooRealVar.h" -#include "FuncCB.h" -#include "FuncCB_cdf.h" - -class TurnonFit -{ - - public: - TurnonFit(const std::string& name); - ~TurnonFit(); - - - void fit(); - void save(TFile* outputFile); - - void setFileName(const std::string& fileName) {m_fileName = fileName;} - void setTreeName(const std::string& treeName) {m_treeName = treeName;} - void setXVar(const std::string& xVar, double min, double max) {m_xVar.SetName(xVar.c_str()); m_xVar.setRange(min, max);} - void setCut(const std::string& cut) {m_cut = cut;} - void setSelectionVars(const std::vector& selectionVars) {m_selectionVars = selectionVars;} - void setSelection(const std::string& selection) {m_selection = selection;} - void setWeightVar(const std::string& weightVar) {m_weightVar = weightVar;} - void setBinning(const std::vector& binning) {m_binning = binning; - std::cout<<"in include:"< m_selectionVars; - std::string m_weightVar; - std::string m_selection; - std::vector m_binning; - int m_nCPU; - bool m_noFit; - - RooRealVar m_xVar; - RooRealVar m_max; - RooRealVar m_alpha; - RooRealVar m_n; - RooRealVar m_mean; - RooRealVar m_sigma; - RooRealVar m_mturn; - RooRealVar m_p; - RooRealVar m_width; - RooRealVar m_yrise; - - //FuncCB* m_function; - FuncCB_cdf* m_function; - RooPlot* m_plot; - RooHist* m_histo; - RooCurve* m_fit; - RooCurve* m_fitError1Sigma; - RooCurve* m_fitError2Sigma; - RooFitResult* m_fitResult; -}; - - -#endif diff --git a/TauTagAndProbe/test/fitter/include/TurnonManager.h b/TauTagAndProbe/test/fitter/include/TurnonManager.h deleted file mode 100644 index 16954656c3a..00000000000 --- a/TauTagAndProbe/test/fitter/include/TurnonManager.h +++ /dev/null @@ -1,48 +0,0 @@ -/** - * @file TurnonManager.h - * @brief - * - * - * @author Jean-Baptiste Sauvan - * - * @date 06/10/2014 - * - * @internal - * Created : 06/10/2014 - * Last update : 06/10/2014 14:24:37 - * by : JB Sauvan - * - * ===================================================================================== - */ - - - - -#ifndef TURNONMANAGER_H -#define TURNONMANAGER_H - -#include "TFile.h" -#include "TEnv.h" -#include "TurnonFit.h" - -class TurnonManager -{ - public: - TurnonManager(); - ~TurnonManager(); - - bool readConfig(const std::string& config); - void fit(); - - private: - TEnv m_params; - int m_nCPU; - bool m_noFit; - std::vector m_turnonFits; - TFile* m_outputFile; - - -}; - - -#endif diff --git a/TauTagAndProbe/test/fitter/include/Utilities.h b/TauTagAndProbe/test/fitter/include/Utilities.h deleted file mode 100644 index c2edc840222..00000000000 --- a/TauTagAndProbe/test/fitter/include/Utilities.h +++ /dev/null @@ -1,79 +0,0 @@ -/** - * @file Utilities.h - * @brief 
Definition of global functions. - * - * - * @author Jean-Baptiste Sauvan - * - * @date 03/24/2010 - * - * @internal - * Created : 03/24/2010 - * Last update : 03/24/2010 05:50:22 PM - * by : JB Sauvan - * - * ===================================================================================== - */ - -#ifndef UTILITIES_H -#define UTILITIES_H - -#include -#include -#include -#include - - -namespace Utilities -{ - - /** - * @brief Separates a string into tokens. - * @param str : the string to tokenize - * @param delimiters : delimites tokens - * @return tokens - */ - void tokenize(const std::string& str, - std::vector& tokens, - const std::string& delimiter = " "); - - std::string intToString(int n); - - void findAndReplace(std::string& sInput, std::string sFind, std::string sReplace ); - - void strip(std::string& sInput); - - - /** - * @brief Converts a string into base types. - * @param s : string to convert - * @return t : converted string - */ - template - bool fromString(T& t, - const std::string& s, - std::ios_base& (*f)(std::ios_base&) = std::dec) - { - std::istringstream iss(s); - return !(iss >> f >> t).fail(); - } - - template std::vector stringToVector(const std::string& str) - { - std::vector tokens; - std::vector values; - tokenize(str, tokens); - for(unsigned b=0; bAddIncludePath("-I$ROOFITSYS/include"); - gSystem->Load("libRooFit"); -} diff --git a/TauTagAndProbe/test/fitter/obj/.gitignore b/TauTagAndProbe/test/fitter/obj/.gitignore deleted file mode 100644 index 5761abcfdf0..00000000000 --- a/TauTagAndProbe/test/fitter/obj/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.o diff --git a/TauTagAndProbe/test/fitter/results/.gitignore b/TauTagAndProbe/test/fitter/results/.gitignore deleted file mode 100644 index 4ea40f8315d..00000000000 --- a/TauTagAndProbe/test/fitter/results/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.root diff --git a/TauTagAndProbe/test/fitter/results/TurnOnPlot_DATA.py b/TauTagAndProbe/test/fitter/results/TurnOnPlot_DATA.py deleted file mode 100644 index f63f871883f..00000000000 --- a/TauTagAndProbe/test/fitter/results/TurnOnPlot_DATA.py +++ /dev/null @@ -1,169 +0,0 @@ -import ROOT -import operator -import array - - -ROOT.gSystem.Load('libRooFit') - - -class TurnOn: - def __init__(self, **args): - self.name = args.get("Name", "turnon") - #self.legend = args.get("Legend","") - self.legend = args.get("Legend","Turn-on") - self.histo = args.get("Histo", None) - self.fit = args.get("Fit", None) - self.markerColor = args.get("MarkerColor", ROOT.kBlack) - self.markerStyle = args.get("MarkerStyle", 20) - self.lineColor = args.get("LineColor", ROOT.kBlack) - self.lineStyle = args.get("LineStyle", 1) - self.histo.SetName(self.name+"_histo") - self.fit.SetName(self.name+"_fit") - - - -class TurnOnPlot: - def __init__(self, **args): - self.name = "" - self.turnons = [] - self.plotDir = "plots/" - self.xRange = (10, 120) - self.xTitle = "Offline p_{T}^{#tau} [GeV]" - #self.legendPosition = (0.6,0.2,0.9,0.4) - self.legendPosition = (0.4,0.2,0.9,0.6) - self.setPlotStyle() - #self.triggerName = args.get("TriggerName", "Turn-On") - self.triggerName = args.get("TriggerName", "") - - def addTurnOn(self, turnon): - self.turnons.append(turnon) - - def plot(self): - canvas = ROOT.TCanvas("c_"+self.name, self.name, 800, 800) - canvas.SetGrid() - canvas.SetLogx() - hDummy = ROOT.TH1F("hDummy_"+self.name, self.name, 1, self.xRange[0], self.xRange[1]) - hDummy.SetAxisRange(0, 1.05, "Y") - hDummy.SetXTitle(self.xTitle) - hDummy.GetXaxis().SetMoreLogLabels() - #hDummy.SetYTitle("Test") - 
hDummy.SetYTitle("Efficiency") - hDummy.Draw() - - - cmsTextFont = 42 # font of the "CMS" label - cmsTextSize = 0.76*0.05 # font size of the "CMS" label - extraTextFont = 52 # for the "preliminary" - extraTextSize = cmsTextSize # for the "preliminary" - xpos = 0.16 - ypos = 0.95 - - CMSbox = ROOT.TLatex (xpos, ypos , "#bf{CMS} #it{Preliminary}") - extraTextBox = ROOT.TLatex (xpos, ypos - 0.05 , "#it{Preliminary}") - CMSbox.SetNDC() - extraTextBox.SetNDC() - CMSbox.SetTextSize(cmsTextSize) - CMSbox.SetTextFont(cmsTextFont) - CMSbox.SetTextColor(ROOT.kBlack) - CMSbox.SetTextAlign(11) - extraTextBox.SetTextSize(extraTextSize) - extraTextBox.SetTextFont(extraTextFont) - extraTextBox.SetTextColor(ROOT.kBlack) - extraTextBox.SetTextAlign(13) - - triggerNameBox = ROOT.TLatex(0.15, 0.95, self.triggerName) - triggerNameBox.SetNDC() - triggerNameBox.SetTextFont(42) - triggerNameBox.SetTextSize(extraTextSize) - triggerNameBox.SetTextColor(ROOT.kBlack) - triggerNameBox.SetTextAlign(11) - - # lumi_num = float(cfg.readOption ("general::lumi")) - # lumi_num = lumi_num/1000. # from pb-1 to fb-1 - # lumi = "%.1f fb^{-1} (13 TeV)" % lumi_num - lumi = "41.5 fb^{-1} (13 TeV, 2017)" # RunB - RunF - lumibox = ROOT.TLatex (0.953, 0.95, lumi) - lumibox.SetNDC() - lumibox.SetTextAlign(31) - lumibox.SetTextSize(extraTextSize) - lumibox.SetTextFont(42) - lumibox.SetTextColor(ROOT.kBlack) - #Line legend - legend = ROOT.TLegend(self.legendPosition[0],self.legendPosition[1],self.legendPosition[2],self.legendPosition[3]) - legend.SetTextFont(42) - legend.SetFillColor(0) - legend.SetTextSize(1*extraTextSize) - legend.SetBorderSize(0) - legend.SetFillColor(0) - legend.SetFillStyle(0) - '''legend1 = ROOT.TLegend(0.14, 0.80, 0.80, 1.02) - legend1.SetBorderSize(0) - legend1.SetTextFont(62) - legend1.SetTextSize(0.025) - legend1.SetLineColor(0) - legend1.SetLineStyle(1) - legend1.SetLineWidth(1) - legend1.SetFillColor(0) - legend1.SetFillStyle(0) - legend1.AddEntry("NULL","CMS Preliminary: #sqrt{s}=13 TeV","h") - legend1.AddEntry("NULL","L1 Threshold : 28 GeV","h")''' - - for turnon in self.turnons: - histo = turnon.histo - histo.SetMarkerStyle(turnon.markerStyle) - histo.SetMarkerColor(turnon.markerColor) - histo.SetLineColor(turnon.markerColor) - histo.SetMarkerSize(1) - histo.SetLineWidth(2) - fit = turnon.fit - fit.SetLineStyle(turnon.lineStyle) - fit.SetLineColor(turnon.lineColor) - fit.SetLineWidth(2) - histo.Draw("p same") - fit.Draw("l same") - # legends - legend.AddEntry(histo, turnon.legend, "pel") - legend.Draw() - #if self.name=="turnon_Stage1_Stage2_EB": - #triggerNameBox.Draw() - CMSbox.Draw() - #extraTextBox.Draw() - lumibox.Draw() - #print ("DEBUG: " + self.plotDir+"/"+self.name+".eps") - canvas.Print(self.plotDir+"/"+self.name+".pdf", "pdf") - canvas.Print(self.plotDir+"/"+self.name+".png", "png") - return canvas - - - def setPlotStyle(self): - ROOT.gROOT.SetStyle("Plain") - ROOT.gStyle.SetOptStat() - ROOT.gStyle.SetOptFit(0) - ROOT.gStyle.SetOptTitle(0) - ROOT.gStyle.SetFrameLineWidth(1) - ROOT.gStyle.SetPadBottomMargin(0.13) - ROOT.gStyle.SetPadLeftMargin(0.15) - ROOT.gStyle.SetPadTopMargin(0.06) - ROOT.gStyle.SetPadRightMargin(0.05) - - ROOT.gStyle.SetLabelFont(42,"X") - ROOT.gStyle.SetLabelFont(42,"Y") - ROOT.gStyle.SetLabelSize(0.04,"X") - ROOT.gStyle.SetLabelSize(0.04,"Y") - ROOT.gStyle.SetLabelOffset(0.01,"Y") - ROOT.gStyle.SetTickLength(0.02,"X") - ROOT.gStyle.SetTickLength(0.02,"Y") - ROOT.gStyle.SetLineWidth(1) - ROOT.gStyle.SetTickLength(0.02 ,"Z") - - ROOT.gStyle.SetTitleSize(0.1) - 
ROOT.gStyle.SetTitleFont(42,"X") - ROOT.gStyle.SetTitleFont(42,"Y") - ROOT.gStyle.SetTitleSize(0.05,"X") - ROOT.gStyle.SetTitleSize(0.05,"Y") - ROOT.gStyle.SetTitleOffset(1.1,"X") - ROOT.gStyle.SetTitleOffset(1.4,"Y") - ROOT.gStyle.SetOptStat(0) - ROOT.gStyle.SetPalette(1) - ROOT.gStyle.SetPaintTextFormat("3.2f") - ROOT.gROOT.ForceStyle() diff --git a/TauTagAndProbe/test/fitter/results/plot_DATA_example.py b/TauTagAndProbe/test/fitter/results/plot_DATA_example.py deleted file mode 100644 index 38796111e7a..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_DATA_example.py +++ /dev/null @@ -1,80 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - - -plots = [] -plots.append(TurnOnPlot.TurnOnPlot()) -plots[-1].name = "turnon_EB_plot" -plots[-1].xRange = (10,109.9) -#plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - -#EE -plots.append(TurnOnPlot.TurnOnPlot()) -plots[-1].name = "turnon_EE_plot" -plots[-1].xRange = (10,109.9) -#plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - -#open Non-Calib plot -inputFile = ROOT.TFile.Open("./TurnOnDataStage2NoCalibAll/Tau_stage1_stage2_EB_EE_All_Iso.root") - -histo_EB = inputFile.Get("histo_Stage2_Barrel_vs_Pt") -histo_EB.__class__ = ROOT.RooHist -histo_EE = inputFile.Get("histo_Stage2_Endcaps_vs_Pt") -histo_EE.__class__ = ROOT.RooHist - - -fit_EB = inputFile.Get("fit_Stage2_Barrel_vs_Pt") -fit_EB.__class__ = ROOT.RooCurve -fit_EE = inputFile.Get("fit_Stage2_Endcaps_vs_Pt") -fit_EE.__class__ = ROOT.RooCurve - -turnon_EB = TurnOnPlot.TurnOn(Name="Stage2_EB_noIso", Histo=histo_EB, Fit=fit_EB, - MarkerColor=ROOT.kBlack, MarkerStyle=20, LineColor=ROOT.kBlack,LineStyle=1, - Legend="Barrel - uncalibrated") - -turnon_EE = TurnOnPlot.TurnOn(Name="Stage2_EE_noIso", Histo=histo_EE, Fit=fit_EE, - MarkerColor=ROOT.kBlack, MarkerStyle=20, LineColor=ROOT.kBlack,LineStyle=1, - Legend="Endcap - uncalibrated") - - -plots[0].addTurnOn(turnon_EB) -plots[1].addTurnOn(turnon_EE) - - - - -#open Calib plot -inputFile = ROOT.TFile.Open("./TurnOnDataStage2CalibAll_13Gen/Tau_stage1_stage2_EB_EE_All_Iso.root") - -histo_EB = inputFile.Get("histo_Stage2_Barrel_vs_Pt") -histo_EB.__class__ = ROOT.RooHist -histo_EE = inputFile.Get("histo_Stage2_Endcaps_vs_Pt") -histo_EE.__class__ = ROOT.RooHist - - -fit_EB = inputFile.Get("fit_Stage2_Barrel_vs_Pt") -fit_EB.__class__ = ROOT.RooCurve -fit_EE = inputFile.Get("fit_Stage2_Endcaps_vs_Pt") -fit_EE.__class__ = ROOT.RooCurve - -turnon_EB = TurnOnPlot.TurnOn(Name="Stage2_EB_noIso", Histo=histo_EB, Fit=fit_EB, - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Barrel - calibrated") - -turnon_EE = TurnOnPlot.TurnOn(Name="Stage2_EE_noIso", Histo=histo_EE, Fit=fit_EE, - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Endcap - calibrated") - - -plots[0].addTurnOn(turnon_EB) -plots[1].addTurnOn(turnon_EE) - - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - -inputFile.Close() - diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_All_DifferentThresholds_L1T.py b/TauTagAndProbe/test/fitter/results/plot_turnOn_All_DifferentThresholds_L1T.py deleted file mode 100644 index 4b6369b6266..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_All_DifferentThresholds_L1T.py +++ /dev/null @@ -1,92 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - - -plots = [] -plots.append(TurnOnPlot.TurnOnPlot(TriggerName="L1 turn-on curves, iso + non-iso")) 
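(Editor's note) The plotting scripts removed from this results/ directory, including the one whose listing continues below, all drive the TurnOnPlot_DATA module deleted above in the same way: open a fitter output file, fetch the RooHist/RooCurve pair for a trigger, re-tag their Python classes, wrap them in a TurnOn and hand that to a TurnOnPlot. A minimal sketch of that pattern, assuming a hypothetical fit output file and object keys:

import ROOT
import TurnOnPlot_DATA as TurnOnPlot   # the module deleted above

inputFile = ROOT.TFile.Open("fitOutput_example.root")   # hypothetical fitter output

histo = inputFile.Get("histo_MyTrigger")   # placeholder object name
histo.__class__ = ROOT.RooHist             # PyROOT needs the concrete class set by hand
fit = inputFile.Get("fit_MyTrigger")       # placeholder object name
fit.__class__ = ROOT.RooCurve

turnon = TurnOnPlot.TurnOn(Name="example", Histo=histo, Fit=fit,
                           MarkerColor=ROOT.kBlack, MarkerStyle=20,
                           LineColor=ROOT.kBlack, LineStyle=1,
                           Legend="Example turn-on")

plot = TurnOnPlot.TurnOnPlot(TriggerName="Example trigger")
plot.name = "turnOn_example"
plot.xRange = (20, 500)
plot.addTurnOn(turnon)
canvas = plot.plot()   # writes plots/turnOn_example.pdf and .png (the plots/ directory must exist)

inputFile.Close()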
-plots[-1].name = "turnOn_All" -plots[-1].xRange = (10,109.9) -#plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - -#EE -plots.append(TurnOnPlot.TurnOnPlot(TriggerName="L1 turn-on curves, iso")) -plots[-1].name = "turnOn_All_iso" -plots[-1].xRange = (10,109.9) -#plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - -#open turn on file -inputFile = ROOT.TFile.Open("FittedTurnOn_Final_MC.root") - -histo_26GeV = inputFile.Get("histo_Stage2_All_vs_Pt_26GeV") -histo_26GeV.__class__ = ROOT.RooHist -histo_30GeV = inputFile.Get("histo_Stage2_All_vs_Pt_30GeV") -histo_30GeV.__class__ = ROOT.RooHist -histo_34GeV = inputFile.Get("histo_Stage2_All_vs_Pt_34GeV") -histo_34GeV.__class__ = ROOT.RooHist - -fit_26GeV = inputFile.Get("fit_Stage2_All_vs_Pt_26GeV") -fit_26GeV.__class__ = ROOT.RooCurve -fit_30GeV = inputFile.Get("fit_Stage2_All_vs_Pt_30GeV") -fit_30GeV.__class__ = ROOT.RooCurve -fit_34GeV = inputFile.Get("fit_Stage2_All_vs_Pt_34GeV") -fit_34GeV.__class__ = ROOT.RooCurve - -turnon_26GeV = TurnOnPlot.TurnOn(Name="turnOn_All_26", Histo=histo_26GeV, Fit=fit_26GeV, - MarkerColor=ROOT.kBlack, MarkerStyle=20, LineColor=ROOT.kBlack,LineStyle=1, - Legend="pt > 26 GeV") - -turnon_30GeV = TurnOnPlot.TurnOn(Name="turnOn_All_30", Histo=histo_30GeV, Fit=fit_30GeV, - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="pt > 30 GeV") - -turnon_34GeV = TurnOnPlot.TurnOn(Name="turnOn_All_34", Histo=histo_34GeV, Fit=fit_34GeV, - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="pt > 34 GeV") - - -plots[0].addTurnOn(turnon_26GeV) -plots[0].addTurnOn(turnon_30GeV) -plots[0].addTurnOn(turnon_34GeV) - - -histo_26GeV_iso = inputFile.Get("histo_Stage2_All_vs_Pt_26GeV_iso") -histo_26GeV_iso.__class__ = ROOT.RooHist -histo_30GeV_iso = inputFile.Get("histo_Stage2_All_vs_Pt_30GeV_iso") -histo_30GeV_iso.__class__ = ROOT.RooHist -histo_34GeV_iso = inputFile.Get("histo_Stage2_All_vs_Pt_34GeV_iso") -histo_34GeV_iso.__class__ = ROOT.RooHist - -fit_26GeV_iso = inputFile.Get("fit_Stage2_All_vs_Pt_26GeV_iso") -fit_26GeV_iso.__class__ = ROOT.RooCurve -fit_30GeV_iso = inputFile.Get("fit_Stage2_All_vs_Pt_30GeV_iso") -fit_30GeV_iso.__class__ = ROOT.RooCurve -fit_34GeV_iso = inputFile.Get("fit_Stage2_All_vs_Pt_34GeV_iso") -fit_34GeV_iso.__class__ = ROOT.RooCurve - -turnon_26GeV_iso = TurnOnPlot.TurnOn(Name="turnOn_All_26_iso", Histo=histo_26GeV_iso, Fit=fit_26GeV_iso, - MarkerColor=ROOT.kBlack, MarkerStyle=20, LineColor=ROOT.kBlack,LineStyle=1, - Legend="pt > 26 GeV") - -turnon_30GeV_iso = TurnOnPlot.TurnOn(Name="turnOn_All_30_iso", Histo=histo_30GeV_iso, Fit=fit_30GeV_iso, - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="pt > 30 GeV") - -turnon_34GeV_iso = TurnOnPlot.TurnOn(Name="turnOn_All_34_iso", Histo=histo_34GeV_iso, Fit=fit_34GeV_iso, - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="pt > 34 GeV") - - -plots[1].addTurnOn(turnon_26GeV_iso) -plots[1].addTurnOn(turnon_30GeV_iso) -plots[1].addTurnOn(turnon_34GeV_iso) - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - - -inputFile.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_Barrel_vs_Endcap.py b/TauTagAndProbe/test/fitter/results/plot_turnOn_Barrel_vs_Endcap.py deleted file mode 100644 index 66ef8aa9dd9..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_Barrel_vs_Endcap.py +++ /dev/null @@ -1,56 +0,0 @@ 
-import ROOT -import TurnOnPlot_DATA as TurnOnPlot - -### Edit here ### - -# TRIGGERS MUST BE DECLARED -triggers = ["HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v", "HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v", "Pt_26GeV", "Pt_34GeV"] -#triggers = ["Pt_30GeV"] -# PLOT TITLES -#plotTitles = ["HLT MediumIsoPFTau32 Barrel - Endcaps", "HLT MediumIsoPFTau20 Barrel - Endcaps"] -# ROOT FILE CONTAINING THE Barrel -fileName = "FittedTurnOn_Final_MC.root" - -### Do not edit from here ### - -#open turn on file -inputFile= ROOT.TFile.Open(fileName) - -histo_Barrel = [] -histo_Endcaps = [] -fit_Barrel = [] -fit_Endcaps = [] -turnon_Barrel = [] -turnon_Endcaps = [] -plots = [] - -for trigger in triggers: - histo_Barrel.append(inputFile.Get("histo_Stage2_Barrel_vs_" + trigger)) - histo_Barrel[-1].__class__ = ROOT.RooHist - histo_Endcaps.append(inputFile.Get("histo_Stage2_Endcaps_vs_" + trigger)) - histo_Endcaps[-1].__class__ = ROOT.RooHist - fit_Barrel.append(inputFile.Get("fit_Stage2_Barrel_vs_" + trigger)) - fit_Barrel[-1].__class__ = ROOT.RooCurve - fit_Endcaps.append(inputFile.Get("fit_Stage2_Endcaps_vs_" + trigger)) - fit_Endcaps[-1].__class__ = ROOT.RooCurve - turnon_Barrel.append(TurnOnPlot.TurnOn(Name="Stage2_Barrel", Histo=histo_Barrel[-1], Fit=fit_Barrel[-1], - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="Barrel")) - turnon_Endcaps.append(TurnOnPlot.TurnOn(Name="Stage2_Endcaps", Histo=histo_Endcaps[-1], Fit=fit_Endcaps[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Endcaps")) - plots.append(TurnOnPlot.TurnOnPlot(TriggerName = trigger + "Barrel - Endcaps")) - plots[-1].name = "turnOn_Barrel_Endcaps_" + trigger - plots[-1].xRange = (10,109.9) - #plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].addTurnOn(turnon_Barrel[-1]) - plots[-1].addTurnOn(turnon_Endcaps[-1]) - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - -inputFile.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_CombinedData_vs_MC_2017.py b/TauTagAndProbe/test/fitter/results/plot_turnOn_CombinedData_vs_MC_2017.py deleted file mode 100644 index 901c3d96924..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_CombinedData_vs_MC_2017.py +++ /dev/null @@ -1,66 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - -### Edit here ### - -# TRIGGERS MUST BE DECLARED -triggers = ["HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v","HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"] - -#triggers = ["ETauTriggerPath_IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30","MuTauTriggerPath_IsoMu20_LooseChargedIsoPFTau27_plusL1Tau32", "DiTauTriggerPath_TightTau35orMediumTau40TightIDorTightTau40_plusL1Tau32"] -# PLOT TITLES -#plotTitles = ["HLT MediumIsoPFTau32 Data - MC", "HLT MediumIsoPFTau20 Data - MC"] -# ROOT FILE CONTAINING THE DATA -dataFileName = "FitResults/2018_01_14/fitOutput_Data_MuTau2017BCDEF_SFpaths_SSsubtraction_vtightTauMVAWP.root" -# ROOT FILE CONTAINING THE MC -mcFileName = "FitResults/2018_01_14/fitOutput_MC_MuTau2017_DYJetsFall17_nomPlusExt_SFpaths_OStaugenmatchPositive_vtightTauMVAWP.root" - -### Do not edit from here ### - -#open turn on file -inputFile_Data = ROOT.TFile.Open(dataFileName) -inputFile_MC = ROOT.TFile.Open(mcFileName) - -histo_Data = [] -histo_MC = [] -fit_Data = [] -fit_MC = [] -turnon_Data = [] -turnon_MC = [] -plots = [] - -for trigger in triggers: - 
histo_Data.append(inputFile_Data.Get("histo_" + trigger)) - histo_Data[-1].__class__ = ROOT.RooHist - histo_MC.append(inputFile_MC.Get("histo_" + trigger)) - histo_MC[-1].__class__ = ROOT.RooHist - fit_Data.append(inputFile_Data.Get("fit_" + trigger)) - fit_Data[-1].__class__ = ROOT.RooCurve - fit_MC.append(inputFile_MC.Get("fit_" + trigger)) - fit_MC[-1].__class__ = ROOT.RooCurve - turnon_Data.append(TurnOnPlot.TurnOn(Name="Stage2_Data", Histo=histo_Data[-1], Fit=fit_Data[-1], - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="Data")) # 2017, Run[B-F]")) - turnon_MC.append(TurnOnPlot.TurnOn(Name="Stage2_MC", Histo=histo_MC[-1], Fit=fit_MC[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Simulation"))#MC 2017, DYJets")) - plots.append(TurnOnPlot.TurnOnPlot(TriggerName = trigger + "Data - MC")) - plots[-1].name = "turnOn_2017_Data_vs_MC_vtightTauMVAWP_" + trigger - plots[-1].xRange = (20,500) - #plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].legendPosition = (0.55,0.3,0.85,0.45) - plots[-1].addTurnOn(turnon_MC[-1]) - plots[-1].addTurnOn(turnon_Data[-1]) - - - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - #print "plot:", plot.name - #canvas.Update() - #canvas.Print(plots[-1].name , "png") - -inputFile_Data.Close() -inputFile_MC.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_DataB_vs_DataC_vs_DY.py b/TauTagAndProbe/test/fitter/results/plot_turnOn_DataB_vs_DataC_vs_DY.py deleted file mode 100644 index 028dcbc2bdc..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_DataB_vs_DataC_vs_DY.py +++ /dev/null @@ -1,76 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - -### Edit here ### - -# TRIGGERS MUST BE DECLARED -triggers = ["HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v", "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v", "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v"] -# PLOT TITLES -#plotTitles = ["HLT MediumIsoPFTau32 Data - MC", "HLT MediumIsoPFTau20 Data - MC"] -# ROOT FILE CONTAINING THE DATA -dataBFileName = "TestTurnOn/fitOutput_Data_MuTau2017B.root" -dataCFileName = "TestTurnOn/fitOutput_Data_MuTau2017C.root" -# ROOT FILE CONTAINING THE MC -mcFileName = "TestTurnOn/fitOutput_DY_BadPixGT.root" - -### Do not edit from here ### - -#open turn on file -inputFile_DataB = ROOT.TFile.Open(dataBFileName) -inputFile_DataC = ROOT.TFile.Open(dataCFileName) -inputFile_MC = ROOT.TFile.Open(mcFileName) - -histo_DataB = [] -histo_DataC = [] -histo_MC = [] -fit_DataB = [] -fit_DataC = [] -fit_MC = [] -turnon_DataB = [] -turnon_DataC = [] -turnon_MC = [] -plots = [] - -for trigger in triggers: - histo_DataB.append(inputFile_DataB.Get("histo_" + trigger)) - histo_DataB[-1].__class__ = ROOT.RooHist - histo_DataC.append(inputFile_DataC.Get("histo_" + trigger)) - histo_DataC[-1].__class__ = ROOT.RooHist - histo_MC.append(inputFile_MC.Get("histo_" + trigger)) - histo_MC[-1].__class__ = ROOT.RooHist - fit_DataB.append(inputFile_DataB.Get("fit_" + trigger)) - fit_DataB[-1].__class__ = ROOT.RooCurve - fit_DataC.append(inputFile_DataC.Get("fit_" + trigger)) - fit_DataC[-1].__class__ = ROOT.RooCurve - fit_MC.append(inputFile_MC.Get("fit_" + trigger)) - fit_MC[-1].__class__ = ROOT.RooCurve - turnon_DataB.append(TurnOnPlot.TurnOn(Name="Stage2_DataB", Histo=histo_DataB[-1], Fit=fit_DataB[-1], - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="Data up to 18/07/2017 (4.6 fb^{-1})")) 
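(Editor's note) Each of the Data-vs-MC comparison scripts removed here, including the one continuing below, repeats the same retrieve-and-cast boilerplate for every input file and trigger. A purely hypothetical helper (not present in the repository) that captures those four repeated lines:

import ROOT

def load_turnon(root_file, key):
    # Fetch the RooHist/RooCurve pair stored as histo_<key> / fit_<key> in an open TFile.
    histo = root_file.Get("histo_" + key)
    histo.__class__ = ROOT.RooHist
    fit = root_file.Get("fit_" + key)
    fit.__class__ = ROOT.RooCurve
    return histo, fit

With it, each loop body would reduce to e.g. histo, fit = load_turnon(inputFile_Data, trigger).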
- turnon_DataC.append(TurnOnPlot.TurnOn(Name="Stage2_DataC", Histo=histo_DataC[-1], Fit=fit_DataC[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Data from 18/07/2017 (1.2 fb^{-1})")) - turnon_MC.append(TurnOnPlot.TurnOn(Name="Stage2_MC", Histo=histo_MC[-1], Fit=fit_MC[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Simulation")) - plots.append(TurnOnPlot.TurnOnPlot(TriggerName = trigger + "Data B - Data C - MC")) - plots[-1].name = "turnOn_Data_2017B_2017C_DY_" + trigger - plots[-1].xRange = (20,500) - #if(trigger=="HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v"): - # plots[-1].xRange = (20,500) - #plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].legendPosition = (0.4,0.2,0.9,0.4) - #plots[-1].addTurnOn(turnon_MC[-1]) - plots[-1].addTurnOn(turnon_DataB[-1]) - plots[-1].addTurnOn(turnon_DataC[-1]) - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - - -inputFile_DataB.Close() -inputFile_DataC.Close() -inputFile_MC.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_Data_vs_MC.py b/TauTagAndProbe/test/fitter/results/plot_turnOn_Data_vs_MC.py deleted file mode 100644 index 8034049bda0..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_Data_vs_MC.py +++ /dev/null @@ -1,60 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - -### Edit here ### - -# TRIGGERS MUST BE DECLARED -triggers = ["HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v", "HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v", "Pt_26GeV", "Pt_34GeV"] -# PLOT TITLES -#plotTitles = ["HLT MediumIsoPFTau32 Data - MC", "HLT MediumIsoPFTau20 Data - MC"] -# ROOT FILE CONTAINING THE DATA -dataFileName = "FittedTurnOn_Final_Data_0_500GeV.root" -# ROOT FILE CONTAINING THE MC -mcFileName = "FittedTurnOn_Final_MC_0_500GeV.root" - -### Do not edit from here ### - -#open turn on file -inputFile_Data = ROOT.TFile.Open(dataFileName) -inputFile_MC = ROOT.TFile.Open(mcFileName) - -histo_Data = [] -histo_MC = [] -fit_Data = [] -fit_MC = [] -turnon_Data = [] -turnon_MC = [] -plots = [] - -for trigger in triggers: - histo_Data.append(inputFile_Data.Get("histo_Stage2_All_vs_" + trigger)) - histo_Data[-1].__class__ = ROOT.RooHist - histo_MC.append(inputFile_MC.Get("histo_Stage2_All_vs_" + trigger)) - histo_MC[-1].__class__ = ROOT.RooHist - fit_Data.append(inputFile_Data.Get("fit_Stage2_All_vs_" + trigger)) - fit_Data[-1].__class__ = ROOT.RooCurve - fit_MC.append(inputFile_MC.Get("fit_Stage2_All_vs_" + trigger)) - fit_MC[-1].__class__ = ROOT.RooCurve - turnon_Data.append(TurnOnPlot.TurnOn(Name="Stage2_Data", Histo=histo_Data[-1], Fit=fit_Data[-1], - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="Data")) - turnon_MC.append(TurnOnPlot.TurnOn(Name="Stage2_MC", Histo=histo_MC[-1], Fit=fit_MC[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Simulation")) - plots.append(TurnOnPlot.TurnOnPlot(TriggerName = trigger + "Data - MC")) - plots[-1].name = "turnOn_Data_MC_" + trigger - plots[-1].xRange = (10,120) - #plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].addTurnOn(turnon_Data[-1]) - plots[-1].addTurnOn(turnon_MC[-1]) - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - - -inputFile_Data.Close() -inputFile_MC.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_EachDataEra_vs_MC_2017.py 
b/TauTagAndProbe/test/fitter/results/plot_turnOn_EachDataEra_vs_MC_2017.py deleted file mode 100644 index b31d93b8d89..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_EachDataEra_vs_MC_2017.py +++ /dev/null @@ -1,120 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - -### Edit here ### - -# TRIGGERS MUST BE DECLARED -triggers = ["HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v", "HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v", "HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v"] -# PLOT TITLES -#plotTitles = ["HLT MediumIsoPFTau32 Data - MC", "HLT MediumIsoPFTau20 Data - MC"] -# ROOT FILE CONTAINING THE DATA -data17BFileName = "TurnOn_Sync/2017_11_23/fitOutput_Data_MuTau2017B_PRv1v2_23112017_v2.root" -data17CFileName = "TurnOn_Sync/2017_11_23/fitOutput_Data_MuTau2017C_PRv1v2v3_2311201_v2.root" -data17DFileName = "TurnOn_Sync/2017_11_23/fitOutput_Data_MuTau2017D_PRv1_23112017_v2.root" -data17EFileName = "TurnOn_Sync/2017_11_23/fitOutput_Data_MuTau2017E_PRv1_23112017_v2.root" -data17FFileName = "TurnOn_Sync/2017_11_23/fitOutput_Data_MuTau2017F_PRv1_23112017_v2.root" -# ROOT FILE CONTAINING THE MC -mcFileName = "TurnOn_Sync/2017_11_23/fitOutput_MC_MuTau2017v10_ext1_v1v2_pileup_23112017_DitauFit.root" - - -### Do not edit from here ### - -#open turn on file -inputFile_Data2017B = ROOT.TFile.Open(data17BFileName) -inputFile_Data2017C = ROOT.TFile.Open(data17CFileName) -inputFile_Data2017D = ROOT.TFile.Open(data17DFileName) -inputFile_Data2017E = ROOT.TFile.Open(data17EFileName) -inputFile_Data2017F = ROOT.TFile.Open(data17FFileName) -inputFile_MC = ROOT.TFile.Open(mcFileName) - -histo_Data2017B = [] -histo_Data2017C = [] -histo_Data2017D = [] -histo_Data2017E = [] -histo_Data2017F = [] -histo_MC = [] -fit_Data2017B = [] -fit_Data2017C = [] -fit_Data2017D = [] -fit_Data2017E = [] -fit_Data2017F = [] -fit_MC = [] -turnon_Data2017B = [] -turnon_Data2017C = [] -turnon_Data2017D = [] -turnon_Data2017E = [] -turnon_Data2017F = [] -turnon_MC = [] -plots = [] - -for trigger in triggers: - histo_Data2017B.append(inputFile_Data2017B.Get("histo_" + trigger)) - histo_Data2017B[-1].__class__ = ROOT.RooHist - histo_Data2017C.append(inputFile_Data2017C.Get("histo_" + trigger)) - histo_Data2017C[-1].__class__ = ROOT.RooHist - histo_Data2017D.append(inputFile_Data2017D.Get("histo_" + trigger)) - histo_Data2017D[-1].__class__ = ROOT.RooHist - histo_Data2017E.append(inputFile_Data2017E.Get("histo_" + trigger)) - histo_Data2017E[-1].__class__ = ROOT.RooHist - histo_Data2017F.append(inputFile_Data2017F.Get("histo_" + trigger)) - histo_Data2017F[-1].__class__ = ROOT.RooHist - histo_MC.append(inputFile_MC.Get("histo_" + trigger)) - histo_MC[-1].__class__ = ROOT.RooHist - fit_Data2017B.append(inputFile_Data2017B.Get("fit_" + trigger)) - fit_Data2017B[-1].__class__ = ROOT.RooCurve - fit_Data2017C.append(inputFile_Data2017C.Get("fit_" + trigger)) - fit_Data2017C[-1].__class__ = ROOT.RooCurve - fit_Data2017D.append(inputFile_Data2017D.Get("fit_" + trigger)) - fit_Data2017D[-1].__class__ = ROOT.RooCurve - fit_Data2017E.append(inputFile_Data2017E.Get("fit_" + trigger)) - fit_Data2017E[-1].__class__ = ROOT.RooCurve - fit_Data2017F.append(inputFile_Data2017F.Get("fit_" + trigger)) - fit_Data2017F[-1].__class__ = ROOT.RooCurve - fit_MC.append(inputFile_MC.Get("fit_" + trigger)) - fit_MC[-1].__class__ = ROOT.RooCurve - turnon_Data2017B.append(TurnOnPlot.TurnOn(Name="Stage2_DataB", Histo=histo_Data2017B[-1], Fit=fit_Data2017B[-1], - MarkerColor=ROOT.kBlue, 
MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="2017 Data B")) - turnon_Data2017C.append(TurnOnPlot.TurnOn(Name="Stage2_DataC", Histo=histo_Data2017C[-1], Fit=fit_Data2017C[-1], - MarkerColor=ROOT.kGreen+2, MarkerStyle=20, LineColor=ROOT.kGreen+2,LineStyle=1, - Legend="2017 Data C")) - turnon_Data2017D.append(TurnOnPlot.TurnOn(Name="Stage2_DataD", Histo=histo_Data2017D[-1], Fit=fit_Data2017D[-1], - MarkerColor=ROOT.kAzure+8, MarkerStyle=20, LineColor=ROOT.kAzure+8,LineStyle=1, - Legend="2017 Data D")) - turnon_Data2017E.append(TurnOnPlot.TurnOn(Name="Stage2_DataE", Histo=histo_Data2017E[-1], Fit=fit_Data2017E[-1], - MarkerColor=ROOT.kOrange+3, MarkerStyle=20, LineColor=ROOT.kOrange+3,LineStyle=1, - Legend="2017 Data E")) - turnon_Data2017F.append(TurnOnPlot.TurnOn(Name="Stage2_DataF", Histo=histo_Data2017F[-1], Fit=fit_Data2017F[-1], - MarkerColor=ROOT.kViolet-2, MarkerStyle=20, LineColor=ROOT.kViolet-2,LineStyle=1, - Legend="2017 Data F")) - turnon_MC.append(TurnOnPlot.TurnOn(Name="Stage2_MC", Histo=histo_MC[-1], Fit=fit_MC[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Simulation")) - plots.append(TurnOnPlot.TurnOnPlot(TriggerName = trigger + "Data - MC")) - plots[-1].name = "turnOn_2017_EachDataEra_vs_MC_" + trigger - plots[-1].xRange = (20,500) - #plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].legendPosition = (0.6,0.2,0.9,0.45) - plots[-1].addTurnOn(turnon_MC[-1]) - plots[-1].addTurnOn(turnon_Data2017B[-1]) - plots[-1].addTurnOn(turnon_Data2017C[-1]) - plots[-1].addTurnOn(turnon_Data2017D[-1]) - plots[-1].addTurnOn(turnon_Data2017E[-1]) - plots[-1].addTurnOn(turnon_Data2017F[-1]) - - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - #print "plot:", plot.name - #canvas.Update() - #canvas.Print(plots[-1].name , "png") - -inputFile_Data2017B.Close() -inputFile_Data2017C.Close() -inputFile_Data2017D.Close() -inputFile_Data2017E.Close() -inputFile_Data2017F.Close() -inputFile_MC.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_Iso_Vs_NonIso.py b/TauTagAndProbe/test/fitter/results/plot_turnOn_Iso_Vs_NonIso.py deleted file mode 100644 index c133699e5d2..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_Iso_Vs_NonIso.py +++ /dev/null @@ -1,79 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - -### Edit here ### - -# TRIGGERS MUST BE DECLARED -triggers = ["Pt_26GeV","Pt_30GeV","Pt_34GeV"] -# PLOT TITLES -#plotTitles = ["HLT MediumIsoPFTau32 Barrel - Endcaps", "HLT MediumIsoPFTau20 Barrel - Endcaps"] -# ROOT FILE CONTAINING THE Barrel -fileName = "FittedTurnOn_Final_Data_0_500GeV.root" -fileName2 = "FittedTurnOn_LooseIsoPFTau32.root" - -### Do not edit from here ### - -#open turn on file -inputFile = ROOT.TFile.Open(fileName) -inputFile2 = ROOT.TFile.Open(fileName2) - -histo_NonIso = [] -histo_Iso = [] -fit_NonIso = [] -fit_Iso = [] -turnon_NonIso = [] -turnon_Iso = [] -plots = [] - -for trigger in triggers: - histo_NonIso.append(inputFile.Get("histo_Stage2_All_vs_" + trigger)) - histo_NonIso[-1].__class__ = ROOT.RooHist - histo_Iso.append(inputFile.Get("histo_Stage2_All_vs_" + trigger + "_iso")) - histo_Iso[-1].__class__ = ROOT.RooHist - fit_NonIso.append(inputFile.Get("fit_Stage2_All_vs_" + trigger)) - fit_NonIso[-1].__class__ = ROOT.RooCurve - fit_Iso.append(inputFile.Get("fit_Stage2_All_vs_" + trigger + "_iso")) - fit_Iso[-1].__class__ = ROOT.RooCurve - turnon_NonIso.append(TurnOnPlot.TurnOn(Name="Stage2_NonIso", Histo=histo_NonIso[-1], Fit=fit_NonIso[-1], 
- MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="Non isolated")) - turnon_Iso.append(TurnOnPlot.TurnOn(Name="Stage2_Iso", Histo=histo_Iso[-1], Fit=fit_Iso[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Isolated")) - plots.append(TurnOnPlot.TurnOnPlot(TriggerName = trigger + "Non iso - iso")) - plots[-1].name = "turnOn_NonIso_Iso_" + trigger - plots[-1].xRange = (10,109.9) - #plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].legendPosition = (0.6,0.2,0.9,0.4) - plots[-1].addTurnOn(turnon_NonIso[-1]) - plots[-1].addTurnOn(turnon_Iso[-1]) - -histo_NonIso.append(inputFile2.Get("histo_Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau32_Trk1_eta2p1_Reg_v")) -histo_NonIso[-1].__class__ = ROOT.RooHist -histo_Iso.append(inputFile.Get("histo_Stage2_All_vs_HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v")) -histo_Iso[-1].__class__ = ROOT.RooHist -fit_NonIso.append(inputFile2.Get("fit_Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau32_Trk1_eta2p1_Reg_v")) -fit_NonIso[-1].__class__ = ROOT.RooCurve -fit_Iso.append(inputFile.Get("fit_Stage2_All_vs_HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v")) -fit_Iso[-1].__class__ = ROOT.RooCurve -turnon_NonIso.append(TurnOnPlot.TurnOn(Name="Stage2_NonIso", Histo=histo_NonIso[-1], Fit=fit_NonIso[-1], - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="Non isolated")) -turnon_Iso.append(TurnOnPlot.TurnOn(Name="Stage2_Iso", Histo=histo_Iso[-1], Fit=fit_Iso[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="Isolated")) -plots.append(TurnOnPlot.TurnOnPlot(TriggerName = "HLT PFTau 32 LooseIso-MediumIso")) -plots[-1].name = "turnOn_NonIso_Iso_PFTau32" -plots[-1].xRange = (10,109.9) -#plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].addTurnOn(turnon_NonIso[-1]) -plots[-1].addTurnOn(turnon_Iso[-1]) - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - -inputFile.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/results/plot_turnOn_SingleL1_vs_WithL1.py b/TauTagAndProbe/test/fitter/results/plot_turnOn_SingleL1_vs_WithL1.py deleted file mode 100644 index af5183138d2..00000000000 --- a/TauTagAndProbe/test/fitter/results/plot_turnOn_SingleL1_vs_WithL1.py +++ /dev/null @@ -1,70 +0,0 @@ -import ROOT -import TurnOnPlot_DATA as TurnOnPlot - -### Edit here ### - -# TRIGGERS MUST BE DECLARED -#triggers = ["HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v", "HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v", "Pt_26GeV", "Pt_34GeV"] -#triggers = ["Pt_30GeV"] -# PLOT TITLES -#plotTitles = ["HLT MediumIsoPFTau32 Barrel - Endcaps", "HLT MediumIsoPFTau20 Barrel - Endcaps"] -# ROOT FILE CONTAINING THE Barrel -fileName = "FittedTurnOn_Final_MC.root" -fileName20GeV = "FittedTurnOn_L1_20GeV.root" - -### Do not edit from here ### - -#open turn on file -inputFile= ROOT.TFile.Open(fileName) -inputFile20GeV= ROOT.TFile.Open(fileName20GeV) - -histo_SingleL1 = [] -histo_WithL1 = [] -fit_SingleL1 = [] -fit_WithL1 = [] -histo_20GeV = [] -fit_20GeV = [] - -turnon_SingleL1 = [] -turnon_WithL1 = [] -turnon_20GeV = [] -plots = [] - -#for trigger in triggers: -histo_SingleL1.append(inputFile.Get("histo_Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v")) -histo_SingleL1[-1].__class__ = ROOT.RooHist -histo_WithL1.append(inputFile.Get("histo_Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v")) -histo_WithL1[-1].__class__ = ROOT.RooHist 
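(Editor's note) The hlt_turnOn_fitter_*.par files removed further below configure one fit per Turnon.<i> block: input file and tree, the probe variable (XVar), the pass condition (Cut), the event weight, the binning and the fit range. Each Turnon.<i>.CB.* line then appears to give the start value, lower bound and upper bound of the corresponding Crystal Ball parameter of the removed TurnonFit/FuncCB_cdf model; this reading is the editor's inference from the RooRealVar members of TurnonFit, not something documented in the repository. In that reading, for example,

  Turnon.1.CB.Mean: 20. 0. 120.

starts the turn-on midpoint at 20 GeV and lets the fit float it between 0 and 120 GeV.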
-histo_20GeV.append(inputFile20GeV.Get("histo_Stage2_All_vs_Pt_20GeV")) -histo_20GeV[-1].__class__ = ROOT.RooHist -fit_SingleL1.append(inputFile.Get("fit_Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v")) -fit_SingleL1[-1].__class__ = ROOT.RooCurve -fit_WithL1.append(inputFile.Get("fit_Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v")) -fit_WithL1[-1].__class__ = ROOT.RooCurve -fit_20GeV.append(inputFile20GeV.Get("fit_Stage2_All_vs_Pt_20GeV")) -fit_20GeV[-1].__class__ = ROOT.RooCurve -turnon_SingleL1.append(TurnOnPlot.TurnOn(Name="Stage2_SingleL1", Histo=histo_SingleL1[-1], Fit=fit_SingleL1[-1], - MarkerColor=ROOT.kBlue, MarkerStyle=20, LineColor=ROOT.kBlue,LineStyle=1, - Legend="No L1T seed")) -turnon_WithL1.append(TurnOnPlot.TurnOn(Name="Stage2_WithL1", Histo=histo_WithL1[-1], Fit=fit_WithL1[-1], - MarkerColor=ROOT.kRed, MarkerStyle=20, LineColor=ROOT.kRed,LineStyle=1, - Legend="With L1T seed")) -turnon_20GeV.append(TurnOnPlot.TurnOn(Name="Stage2_20GeV", Histo=histo_20GeV[-1], Fit=fit_20GeV[-1], - MarkerColor=ROOT.kGreen, MarkerStyle=20, LineColor=ROOT.kGreen,LineStyle=1, - Legend="L1T 20 GeV turn-on")) -plots.append(TurnOnPlot.TurnOnPlot(TriggerName = "HLT LooseIsoPFTau20" + "No L1T seed - With L1T seed")) -plots[-1].name = "turnOn_SingleL1_WithL1_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v" -plots[-1].xRange = (10,109.9) -#plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].legendPosition = (0.6,0.2,0.9,0.4) -plots[-1].addTurnOn(turnon_SingleL1[-1]) -plots[-1].addTurnOn(turnon_WithL1[-1]) -plots[-1].addTurnOn(turnon_20GeV[-1]) - -canvas = [] -for plot in plots: - canvas.append(plot.plot()) - -inputFile.Close() - -raw_input() diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY.par deleted file mode 100644 index 69d9510faab..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY.par +++ /dev/null @@ -1,50 +0,0 @@ -OutputFile: results/FitResults/2018_01_14/fitOutput_MC_MuTau2017_DY1JetsFall17_v2.root -NCPU: 4 - -Turnon.N: 3 - -Turnon.1.Name: HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v -Turnon.1.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_RunIIFall17MiniAOD-94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTPath_0 -Turnon.1.WeightVar: bkgSubANDpuW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 0.1 0.01 50. -Turnon.1.CB.N: 20. 1.001 100. -Turnon.1.CB.Mean: 20. 0. 120. -Turnon.1.CB.Sigma: 3. 0.01 30 - - - -Turnon.2.Name: HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v -Turnon.2.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_RunIIFall17MiniAOD-94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasHLTPath_8 -Turnon.2.WeightVar: bkgSubANDpuW -Turnon.2.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.2.FitRange:0 600 -Turnon.2.CB.Max: 1. 0.99 1. -Turnon.2.CB.Alpha: 0.1 0.01 50. -Turnon.2.CB.N: 10. 1.001 120. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 3. 
0.01 10 - - - -Turnon.3.Name: HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v -Turnon.3.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_RunIIFall17MiniAOD-94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasHLTPath_13 -Turnon.3.WeightVar: bkgSubANDpuW -Turnon.3.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.3.FitRange:0 600 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 0.1 0.01 50. -Turnon.3.CB.N: 20. 1.001 100. -Turnon.3.CB.Mean: 40. 0. 120. -Turnon.3.CB.Sigma: 3. 0.01 40 diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY_2017completeList.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY_2017completeList.par deleted file mode 100644 index a56b331bc96..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY_2017completeList.par +++ /dev/null @@ -1,132 +0,0 @@ -OutputFile: results/FitResults/2018_01_14/fitOutput_MC_MuTau2017_DYJetsFall17_newVersion.root -NCPU: 4 - -Turnon.N: 9 - -Turnon.1.Name: ETauTriggerPath_IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30 -Turnon.1.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTetauPath_13 -Turnon.1.WeightVar: bkgSubANDpuW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 0.1 0.01 50. -Turnon.1.CB.N: 20. 1.001 100. -Turnon.1.CB.Mean: 20. 0. 120. -Turnon.1.CB.Sigma: 3. 0.01 30 - - -Turnon.2.Name: MuTauTriggerPath_IsoMu20_LooseChargedIsoPFTau27 -Turnon.2.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasHLTmutauPath_13 -Turnon.2.WeightVar: bkgSubANDpuW -Turnon.2.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.2.FitRange:0 600 -Turnon.2.CB.Max: 1. 0.99 1. -Turnon.2.CB.Alpha: 0.1 0.01 50. -Turnon.2.CB.N: 10. 1.001 120. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 3. 0.01 10 - - -Turnon.3.Name: DiTauTriggerPath_TightTau35orMediumTau40TightIDorTightTau40_plusL1Tau32 -Turnon.3.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasHLTditauPath_11or20or21 -Turnon.3.WeightVar: bkgSubANDpuW -Turnon.3.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.3.FitRange:0 600 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 0.1 0.01 50. -Turnon.3.CB.N: 20. 1.001 100. -Turnon.3.CB.Mean: 40. 0. 120. -Turnon.3.CB.Sigma: 3. 0.01 40 - -Turnon.4.Name: DiTauTriggerPath_TightTau35TightIDorMediumTau35TightIDplusHLTTau40orTightTau35plusHLTTau40_plusL1Tau32 -Turnon.4.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.4.Tree: TagAndProbe -Turnon.4.XVar: tauPt -Turnon.4.Cut: hasHLTditauPath_9or10or11 -Turnon.4.WeightVar: bkgSubANDpuW -Turnon.4.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.4.FitRange:0 600 -Turnon.4.CB.Max: 1. 0.9 1. -Turnon.4.CB.Alpha: 0.1 0.01 50. -Turnon.4.CB.N: 20. 
1.001 100. -Turnon.4.CB.Mean: 40. 0. 120. -Turnon.4.CB.Sigma: 3. 0.01 40 - -Turnon.5.Name: HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v -Turnon.5.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.5.Tree: TagAndProbe -Turnon.5.XVar: tauPt -Turnon.5.Cut: hasHLTPath_13 -Turnon.5.WeightVar: bkgSubANDpuW -Turnon.5.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.5.FitRange:0 600 -Turnon.5.CB.Max: 1. 0.9 1. -Turnon.5.CB.Alpha: 0.1 0.01 50. -Turnon.5.CB.N: 20. 1.001 100. -Turnon.5.CB.Mean: 40. 0. 120. -Turnon.5.CB.Sigma: 3. 0.01 40 - -Turnon.6.Name: HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v -Turnon.6.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.6.Tree: TagAndProbe -Turnon.6.XVar: tauPt -Turnon.6.Cut: hasHLTPath_8 -Turnon.6.WeightVar: bkgSubANDpuW -Turnon.6.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.6.FitRange:0 600 -Turnon.6.CB.Max: 1. 0.9 1. -Turnon.6.CB.Alpha: 0.1 0.01 50. -Turnon.6.CB.N: 20. 1.001 100. -Turnon.6.CB.Mean: 40. 0. 120. -Turnon.6.CB.Sigma: 3. 0.01 40 - -Turnon.7.Name: HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v -Turnon.7.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.7.Tree: TagAndProbe -Turnon.7.XVar: tauPt -Turnon.7.Cut: hasHLTPath_9 -Turnon.7.WeightVar: bkgSubW -Turnon.7.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.7.FitRange:0 600 -Turnon.7.CB.Max: 1. 0.9 1. -Turnon.7.CB.Alpha: 0.1 0.01 50. -Turnon.7.CB.N: 20. 1.001 100. -Turnon.7.CB.Mean: 40. 0. 120. -Turnon.7.CB.Sigma: 3. 0.01 40 - -Turnon.8.Name: HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v -Turnon.8.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.8.Tree: TagAndProbe -Turnon.8.XVar: tauPt -Turnon.8.Cut: hasHLTPath_10 -Turnon.8.WeightVar: bkgSubANDpuW -Turnon.8.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.8.FitRange:0 600 -Turnon.8.CB.Max: 1. 0.9 1. -Turnon.8.CB.Alpha: 0.1 0.01 50. -Turnon.8.CB.N: 20. 1.001 100. -Turnon.8.CB.Mean: 40. 0. 120. -Turnon.8.CB.Sigma: 3. 0.01 40 - -Turnon.9.Name: HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v -Turnon.9.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_DYJets_RunIIFall17MiniAOD-RECOSIMstep_94X_mc2017_realistic_v10-v1_14_01_2018_PU_forFit.root -Turnon.9.Tree: TagAndProbe -Turnon.9.XVar: tauPt -Turnon.9.Cut: hasHLTPath_11 -Turnon.9.WeightVar: bkgSubANDpuW -Turnon.9.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.9.FitRange:0 600 -Turnon.9.CB.Max: 1. 0.9 1. -Turnon.9.CB.Alpha: 0.1 0.01 50. -Turnon.9.CB.N: 20. 1.001 100. -Turnon.9.CB.Mean: 40. 0. 120. -Turnon.9.CB.Sigma: 3. 
0.01 40 diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY_2017completeList_newFunc.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY_2017completeList_newFunc.par deleted file mode 100644 index feb25d42706..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_DY_2017completeList_newFunc.par +++ /dev/null @@ -1,50 +0,0 @@ -OutputFile: results/FitResults/2018_07_17/fitOutput_MC_MuTau2017_DYJetsFall17_newFunction_mediumWP_realTau.root -NCPU: 4 - -Turnon.N: 3 -Turnon.1.Name: ETauTriggerPath_IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30 -Turnon.1.File: /eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/NTuple_DYJetsToLL_12Apr2018_v1Andext1v1_12062018_PU_1000binMC_OStauGenMatched_MediumWP2017v2_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTetauPath_13 -Turnon.1.WeightVar: bkgSubANDpuW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange: 20 500 -Turnon.1.CB.Max: 1.5 0.1 1.5 -Turnon.1.CB.Alpha: 0.01 0.01 1.0 -Turnon.1.CB.N: 3. 1.0 5. -Turnon.1.CB.Mean: -50. -20. -200. -Turnon.1.CB.Sigma: 5. 0.2 10.0 -Turnon.1.CB.YRise: 1.0 0.1 1.0 - - -Turnon.2.Name: MuTauTriggerPath_IsoMu20_LooseChargedIsoPFTau27 -Turnon.2.File: /eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/NTuple_DYJetsToLL_12Apr2018_v1Andext1v1_12062018_PU_1000binMC_OStauGenMatched_MediumWP2017v2_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasHLTmutauPath_13 -Turnon.2.WeightVar: bkgSubANDpuW -Turnon.2.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.2.FitRange: 0 600 -Turnon.2.CB.Max: 5.0 0.1 20.0 -Turnon.2.CB.Alpha: 0.01 0.01 1.0 -Turnon.2.CB.N: 50. 1.0 200. -Turnon.2.CB.Mean: -30. -20. -100. -Turnon.2.CB.Sigma: 4.0 0.5 15. -Turnon.2.CB.YRise: 5.0 0.1 5.5 - - -Turnon.3.Name: DiTauTriggerPath_TightTau35TightIDorMediumTau35TightIDplusHLTTau40orTightTau35plusHLTTau40_plusL1Tau32 -Turnon.3.File: /eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/NTuple_DYJetsToLL_12Apr2018_v1Andext1v1_12062018_PU_1000binMC_OStauGenMatched_MediumWP2017v2_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasHLTditauPath_9or10or11 -Turnon.3.WeightVar: bkgSubANDpuW -Turnon.3.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.3.FitRange: 0 500 -Turnon.3.CB.Max: 10. 0.1 50.0 -Turnon.3.CB.Alpha: 0.01 0.01 50.0 -Turnon.3.CB.N: 10. 1.0 150. -Turnon.3.CB.Mean: -40. -30.0 -100. -Turnon.3.CB.Sigma: 3.0 0.5 20. -Turnon.3.CB.YRise: 1.0 0.1 10.0 diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017B.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017B.par deleted file mode 100644 index d0e6b7c5b6a..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017B.par +++ /dev/null @@ -1,56 +0,0 @@ -OutputFile: results/TestTurnOn/fitOutput_Data_MuTau2017B.root -NCPU: 4 - -Turnon.N: 3 - -Turnon.1.Name: HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v -Turnon.1.File: /data_CMS/cms/strebler/TauHLT/TagAndProbeTrees/TagAndProbe_MuTau_TandP_all/NTuple_MuTau2017B_Cv1_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTPath_0 -Turnon.1.SelectionVars: RunNumber -Turnon.1.Selection: RunNumber<299368 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 3. 0.01 50. -Turnon.1.CB.N: 10. 1.001 100. -Turnon.1.CB.Mean: 30. 0. 120. -Turnon.1.CB.Sigma: 2. 
0.01 10 - - - -Turnon.2.Name: HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v -Turnon.2.File: /data_CMS/cms/strebler/TauHLT/TagAndProbeTrees/TagAndProbe_MuTau_TandP_all/NTuple_MuTau2017B_Cv1_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasHLTPath_8 -Turnon.2.SelectionVars: RunNumber -Turnon.2.Selection: RunNumber<299368 -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.2.FitRange:0 600 -Turnon.2.CB.Max: 1. 0.9 1. -Turnon.2.CB.Alpha: 3. 0.01 50. -Turnon.2.CB.N: 10. 1.001 100. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 2. 0.01 10 - - - -Turnon.3.Name: HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v -Turnon.3.File: /data_CMS/cms/strebler/TauHLT/TagAndProbeTrees/TagAndProbe_MuTau_TandP_all/NTuple_MuTau2017B_Cv1_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasHLTPath_13 -Turnon.3.SelectionVars: RunNumber -Turnon.3.Selection: RunNumber<299368 -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.3.FitRange:0 600 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 3. 0.01 50. -Turnon.3.CB.N: 10. 1.001 100. -Turnon.3.CB.Mean: 30. 0. 120. -Turnon.3.CB.Sigma: 2. 0.01 10 diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF.par deleted file mode 100644 index 96be6a42f2a..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF.par +++ /dev/null @@ -1,55 +0,0 @@ -OutputFile: results/FitResults/2018_01_14/fitOutput_Data_MuTau2017BCDEF.root -NCPU: 4 - -Turnon.N: 3 -Turnon.1.Name: HLT_IsoMu24_eta2p1_LooseChargedIsoPFTau20_SingleL1_v -Turnon.1.File:/afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTPath_0 -#Turnon.1.SelectionVars: RunNumber -#Turnon.1.Selection: RunNumber<306127 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 0.1 0.01 50. -Turnon.1.CB.N: 30. 1.001 100. -Turnon.1.CB.Mean: 40. 0. 120. -Turnon.1.CB.Sigma: 5 0.01 30 - - - -Turnon.2.Name: HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v -Turnon.2.File:/afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasHLTPath_8 -#Turnon.2.SelectionVars: RunNumber -#Turnon.2.Selection: RunNumber<306127 -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.2.FitRange:0 600 -Turnon.2.CB.Max: 1. 0.9 1. -Turnon.2.CB.Alpha: 0.1 0.01 50. -Turnon.2.CB.N: 10. 1.001 100. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 3. 0.01 10 - - - -Turnon.3.Name: HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v -Turnon.3.File:/afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasHLTPath_13 -#Turnon.3.SelectionVars: RunNumber -#Turnon.3.Selection: RunNumber<306127 -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.3.FitRange:0 600 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 0.1 0.01 50. -Turnon.3.CB.N: 30. 
1.001 100. -Turnon.3.CB.Mean: 40. 0. 120. -Turnon.3.CB.Sigma: 4 0.01 20 diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF_2017completeList.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF_2017completeList.par deleted file mode 100644 index 99edec07d88..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF_2017completeList.par +++ /dev/null @@ -1,138 +0,0 @@ -OutputFile: results/FitResults/2018_01_14/fitOutput_Data_MuTau2017BCDEF_newVersion.root -NCPU: 4 - -Turnon.N: 9 -Turnon.1.Name: ETauTriggerPath_IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30 -Turnon.1.File:/afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTetauPath_13 -#Turnon.1.SelectionVars: RunNumber -#Turnon.1.Selection: RunNumber<306127 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 0.1 0.01 50. -Turnon.1.CB.N: 30. 1.001 100. -Turnon.1.CB.Mean: 40. 0. 120. -Turnon.1.CB.Sigma: 5 0.01 30 - - -Turnon.2.Name: MuTauTriggerPath_IsoMu20_LooseChargedIsoPFTau27 -Turnon.2.File:/afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasHLTmutauPath_13 -#Turnon.2.SelectionVars: RunNumber -#Turnon.2.Selection: RunNumber<306127 -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.2.FitRange:0 600 -Turnon.2.CB.Max: 1. 0.9 1. -Turnon.2.CB.Alpha: 0.1 0.01 50. -Turnon.2.CB.N: 10. 1.001 100. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 3. 0.01 10 - - -Turnon.3.Name: DiTauTriggerPath_TightTau35orMediumTau40TightIDorTightTau40_plusL1Tau32 -Turnon.3.File:/afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasHLTditauPath_11or20or21 -#Turnon.3.SelectionVars: RunNumber -#Turnon.3.Selection: RunNumber<306127 -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.3.FitRange:0 600 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 0.1 0.01 50. -Turnon.3.CB.N: 30. 1.001 100. -Turnon.3.CB.Mean: 40. 0. 120. -Turnon.3.CB.Sigma: 4 0.01 20 - -Turnon.4.Name: DiTauTriggerPath_TightTau35TightIDorMediumTau35TightIDplusHLTTau40orTightTau35plusHLTTau40_plusL1Tau32 -Turnon.4.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.4.Tree: TagAndProbe -Turnon.4.XVar: tauPt -Turnon.4.Cut: hasHLTditauPath_9or10or11 -Turnon.4.WeightVar: bkgSubW -Turnon.4.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.4.FitRange:0 600 -Turnon.4.CB.Max: 1. 0.9 1. -Turnon.4.CB.Alpha: 0.1 0.01 50. -Turnon.4.CB.N: 20. 1.001 100. -Turnon.4.CB.Mean: 40. 0. 120. -Turnon.4.CB.Sigma: 3. 
0.01 40 - -Turnon.5.Name: HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v -Turnon.5.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.5.Tree: TagAndProbe -Turnon.5.XVar: tauPt -Turnon.5.Cut: hasHLTPath_13 -Turnon.5.WeightVar: bkgSubW -Turnon.5.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.5.FitRange:0 600 -Turnon.5.CB.Max: 1. 0.9 1. -Turnon.5.CB.Alpha: 0.1 0.01 50. -Turnon.5.CB.N: 20. 1.001 100. -Turnon.5.CB.Mean: 40. 0. 120. -Turnon.5.CB.Sigma: 3. 0.01 40 - -Turnon.6.Name: HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v -Turnon.6.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.6.Tree: TagAndProbe -Turnon.6.XVar: tauPt -Turnon.6.Cut: hasHLTPath_8 -Turnon.6.WeightVar: bkgSubW -Turnon.6.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.6.FitRange:0 600 -Turnon.6.CB.Max: 1. 0.9 1. -Turnon.6.CB.Alpha: 0.1 0.01 50. -Turnon.6.CB.N: 20. 1.001 100. -Turnon.6.CB.Mean: 40. 0. 120. -Turnon.6.CB.Sigma: 3. 0.01 40 - - -Turnon.7.Name: HLT_IsoMu24_eta2p1_MediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v -Turnon.7.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.7.Tree: TagAndProbe -Turnon.7.XVar: tauPt -Turnon.7.Cut: hasHLTPath_9 -Turnon.7.WeightVar: bkgSubW -Turnon.7.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.7.FitRange:0 600 -Turnon.7.CB.Max: 1. 0.9 1. -Turnon.7.CB.Alpha: 0.1 0.01 50. -Turnon.7.CB.N: 20. 1.001 100. -Turnon.7.CB.Mean: 40. 0. 120. -Turnon.7.CB.Sigma: 3. 0.01 40 - -Turnon.8.Name: HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_eta2p1_Reg_CrossL1_v -Turnon.8.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.8.Tree: TagAndProbe -Turnon.8.XVar: tauPt -Turnon.8.Cut: hasHLTPath_10 -Turnon.8.WeightVar: bkgSubW -Turnon.8.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.8.FitRange:0 600 -Turnon.8.CB.Max: 1. 0.9 1. -Turnon.8.CB.Alpha: 0.1 0.01 50. -Turnon.8.CB.N: 20. 1.001 100. -Turnon.8.CB.Mean: 40. 0. 120. -Turnon.8.CB.Sigma: 3. 0.01 40 - -Turnon.9.Name: HLT_IsoMu24_eta2p1_TightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_CrossL1_v -Turnon.9.File: /afs/cern.ch/user/h/hsert/TriggerStudies/ForkedRepo/Samples/2018_01_14/NTuple_Data2017BCDEF_17Nov2017-v1_14_01_2018_forFit.root -Turnon.9.Tree: TagAndProbe -Turnon.9.XVar: tauPt -Turnon.9.Cut: hasHLTPath_11 -Turnon.9.WeightVar: bkgSubW -Turnon.9.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.9.FitRange:0 600 -Turnon.9.CB.Max: 1. 0.9 1. -Turnon.9.CB.Alpha: 0.1 0.01 50. -Turnon.9.CB.N: 20. 1.001 100. -Turnon.9.CB.Mean: 40. 0. 120. -Turnon.9.CB.Sigma: 3. 
0.01 40 diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF_2017completeList_newFunc.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF_2017completeList_newFunc.par deleted file mode 100644 index bc44716e67d..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_Run2017BCDEF_2017completeList_newFunc.par +++ /dev/null @@ -1,50 +0,0 @@ -OutputFile: results/FitResults/2018_07_17/fitOutput_Data_MuTau2017BCDEF_newFunction_mediumWP_realTau.root -NCPU: 4 - -Turnon.N: 3 -Turnon.1.Name: ETauTriggerPath_IsoMu20_LooseChargedIsoPFTau27_plusL1Tau26andHLTTau30 -Turnon.1.File:/eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/NTuple_Data_Run2017BCDEF_31Mar2018_12062018_SSsubtraction_MediumWP2017v2_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTetauPath_13 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.FitRange:20 500 -Turnon.1.CB.Max: 1.5 0.1 1.5 -Turnon.1.CB.Alpha: 0.01 0.01 1.0 -Turnon.1.CB.N: 50. 1.0 150. -Turnon.1.CB.Mean: -30. -20. -200. -Turnon.1.CB.Sigma: 12.0 0.2 12. -Turnon.1.CB.YRise: 1.0 0.1 1.0 - - -Turnon.2.Name: MuTauTriggerPath_IsoMu20_LooseChargedIsoPFTau27 -Turnon.2.File:/eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/NTuple_Data_Run2017BCDEF_31Mar2018_12062018_SSsubtraction_MediumWP2017v2_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasHLTmutauPath_13 -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.2.FitRange:0 600 -Turnon.2.CB.Max: 1.5 0.1 1.5 -Turnon.2.CB.Alpha: 0.01 0.01 1.0 -Turnon.2.CB.N: 3. 1.0 50. -Turnon.2.CB.Mean: -50. -20. -200. -Turnon.2.CB.Sigma: 1.0 0.1 10.0 -Turnon.2.CB.YRise: 1.0 0.5 1.0 - -Turnon.3.Name: DiTauTriggerPath_TightTau35TightIDorMediumTau35TightIDplusHLTTau40orTightTau35plusHLTTau40_plusL1Tau32 -Turnon.3.File: /eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/NTuple_Data_Run2017BCDEF_31Mar2018_12062018_SSsubtraction_MediumWP2017v2_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasHLTditauPath_9or10or11 -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.3.FitRange:0 600 -Turnon.3.CB.Max: 10. 0.1 50.0 -Turnon.3.CB.Alpha: 0.01 0.01 50.0 -Turnon.3.CB.N: 10. 1.0 150. -Turnon.3.CB.Mean: -40. -30.0 -100. -Turnon.3.CB.Sigma: 3.0 0.5 15. -Turnon.3.CB.YRise: 1.0 0.1 10.0 diff --git a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_test.par b/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_test.par deleted file mode 100644 index 1e6e13acd6e..00000000000 --- a/TauTagAndProbe/test/fitter/run/hlt_turnOn_fitter_test.par +++ /dev/null @@ -1,19 +0,0 @@ -OutputFile: results/FitResults/2018_07_17/fitOutput_test.root -NCPU: 4 - -Turnon.N: 1 -Turnon.1.Name: DiTauTriggerPath_TightTau35TightIDorMediumTau35TightIDplusHLTTau40orTightTau35plusHLTTau40_plusL1Tau32 -Turnon.1.File: /eos/user/h/hsert/TriggerStudies/ForkedRepo/Samples/12062018/NTuple_Data_Run2017BCDEF_31Mar2018_12062018_SSsubtraction_MediumWP2017v2_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTditauPath_9or10or11 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 20 25 30 35 40 45 50 60 80 100 150 200 500 -Turnon.1.FitRange: 0 600 -Turnon.1.CB.Max: 0.96715 0.7 1.2 -Turnon.1.CB.Alpha: 0.59355 0.1 0.9 -Turnon.1.CB.N: 1.8296 1.0 3.6 -Turnon.1.CB.Mean: -36.411 -100. 10. 
-Turnon.1.CB.Sigma: 3.9488 2.3 6.5 -Turnon.1.CB.YRise: 0.98019 0.86 1.0 - diff --git a/TauTagAndProbe/test/fitter/run/stage2_turOn_fitter_template_config.par b/TauTagAndProbe/test/fitter/run/stage2_turOn_fitter_template_config.par deleted file mode 100644 index 312670b6c1e..00000000000 --- a/TauTagAndProbe/test/fitter/run/stage2_turOn_fitter_template_config.par +++ /dev/null @@ -1,110 +0,0 @@ -OutputFile: results/TestTurnOn/fitOutput.root -NCPU: 50 - -Turnon.N: 6 - - - -## Stage-2 All -Turnon.1.Name: Stage2_All_vs_Pt_26GeV -Turnon.1.File: ../Run2016B-H/result_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasL1_26 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 3. 0.01 50. -Turnon.1.CB.N: 10. 1.001 100. -Turnon.1.CB.Mean: 30. 0. 120. -Turnon.1.CB.Sigma: 2. 0.01 10 -Turnon.1.CB.Mturn: 20. 0. 120. -Turnon.1.CB.P: 1 1 1. -Turnon.1.CB.Width: 10. 1. 100. - -Turnon.2.Name: Stage2_All_vs_Pt_26GeV_iso -Turnon.2.File: ../Run2016B-H/result_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasL1_26_iso -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.2.FitRange:0 500 -Turnon.2.CB.Max: 1. 0.9 1. -Turnon.2.CB.Alpha: 3. 0.01 50. -Turnon.2.CB.N: 10. 1.001 100. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 2. 0.01 10 -Turnon.2.CB.Mturn: 20. 0. 120. -Turnon.2.CB.P: 0.8 0.2 1. -Turnon.2.CB.Width: 10. 1. 100. - -Turnon.3.Name: Stage2_All_vs_Pt_30GeV -Turnon.3.File: ../Run2016B-H/result_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasL1_30 -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.3.FitRange:0 500 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 3. 0.01 50. -Turnon.3.CB.N: 10. 1.001 100. -Turnon.3.CB.Mean: 30. 0. 120. -Turnon.3.CB.Sigma: 2. 0.01 10 -Turnon.3.CB.Mturn: 20. 0. 120. -Turnon.3.CB.P: 1 1 1. -Turnon.3.CB.Width: 10. 1. 100. - -Turnon.4.Name: Stage2_All_vs_Pt_30GeV_iso -Turnon.4.File: ../Run2016B-H/result_forFit.root -Turnon.4.Tree: TagAndProbe -Turnon.4.XVar: tauPt -Turnon.4.Cut: hasL1_30_iso -Turnon.4.WeightVar: bkgSubW -Turnon.4.Binning: 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.4.FitRange:0 500 -Turnon.4.CB.Max: 1. 0.9 1. -Turnon.4.CB.Alpha: 3. 0.01 50. -Turnon.4.CB.N: 10. 1.001 100. -Turnon.4.CB.Mean: 30. 0. 120. -Turnon.4.CB.Sigma: 2. 0.01 10 -Turnon.4.CB.Mturn: 20. 0. 120. -Turnon.4.CB.P: 0.8 0.2 1. -Turnon.4.CB.Width: 10. 1. 100. - -Turnon.5.Name: Stage2_All_vs_Pt_34GeV -Turnon.5.File: ../Run2016B-H/result_forFit.root -Turnon.5.Tree: TagAndProbe -Turnon.5.XVar: tauPt -Turnon.5.Cut: hasL1_34 -Turnon.5.WeightVar: bkgSubW -Turnon.5.Binning: 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.5.FitRange:0 500 -Turnon.5.CB.Max: 1. 0.9 1. -Turnon.5.CB.Alpha: 3. 0.01 50. -Turnon.5.CB.N: 10. 1.001 100. -Turnon.5.CB.Mean: 30. 0. 120. -Turnon.5.CB.Sigma: 2. 0.01 10 -Turnon.5.CB.Mturn: 20. 0. 120. -Turnon.5.CB.P: 0.8 0.2 1. -Turnon.5.CB.Width: 10. 1. 100. - -Turnon.6.Name: Stage2_All_vs_Pt_34GeV_iso -Turnon.6.File: ../Run2016B-H/result_forFit.root -Turnon.6.Tree: TagAndProbe -Turnon.6.XVar: tauPt -Turnon.6.Cut: hasL1_34_iso -Turnon.6.WeightVar: bkgSubW -Turnon.6.Binning: 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.6.FitRange:0 500 -Turnon.6.CB.Max: 1. 
0.9 1. -Turnon.6.CB.Alpha: 3. 0.01 50. -Turnon.6.CB.N: 10. 1.001 100. -Turnon.6.CB.Mean: 30. 0. 120. -Turnon.6.CB.Sigma: 2. 0.01 10 -Turnon.6.CB.Mturn: 20. 0. 120. -Turnon.6.CB.P: 1 1 1. -Turnon.6.CB.Width: 10. 1. 100. - diff --git a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter.par b/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter.par deleted file mode 100644 index db2f3b12e50..00000000000 --- a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter.par +++ /dev/null @@ -1,573 +0,0 @@ -OutputFile: results/TestTurnOn/fitOutput.root -NCPU: 4 - -Turnon.N: 36 - - -## Stage-2 Barrel -Turnon.1.Name: Stage2_Barrel_vs_Pt_26GeV -Turnon.1.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasL1_26 -Turnon.1.SelectionVars: tauEta -Turnon.1.Selection: abs(tauEta)<1.305 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 3. 0.01 50. -Turnon.1.CB.N: 10. 1.001 100. -Turnon.1.CB.Mean: 30. 0. 120. -Turnon.1.CB.Sigma: 2. 0.01 10 - -Turnon.2.Name: Stage2_Barrel_vs_Pt_26GeV_iso -Turnon.2.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasL1_26_iso -Turnon.2.SelectionVars: tauEta -Turnon.2.Selection: abs(tauEta)<1.305 -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.2.FitRange:0 500 -Turnon.2.CB.Max: 1. 0.9 1. -Turnon.2.CB.Alpha: 3. 0.01 50. -Turnon.2.CB.N: 10. 1.001 100. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 2. 0.01 10 - -Turnon.3.Name: Stage2_Barrel_vs_Pt_30GeV -Turnon.3.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasL1_30 -Turnon.3.SelectionVars: tauEta -Turnon.3.Selection: abs(tauEta)<1.305 -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.3.FitRange:0 500 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 3. 0.01 50. -Turnon.3.CB.N: 10. 1.001 100. -Turnon.3.CB.Mean: 30. 0. 120. -Turnon.3.CB.Sigma: 2. 0.01 10 - -Turnon.4.Name: Stage2_Barrel_vs_Pt_30GeV_iso -Turnon.4.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.4.Tree: TagAndProbe -Turnon.4.XVar: tauPt -Turnon.4.Cut: hasL1_30_iso -Turnon.4.SelectionVars: tauEta -Turnon.4.WeightVar: bkgSubW -Turnon.4.Selection: abs(tauEta)<1.305 -Turnon.4.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.4.FitRange:0 500 -Turnon.4.CB.Max: 1. 0.9 1. -Turnon.4.CB.Alpha: 3. 0.01 50. -Turnon.4.CB.N: 10. 1.001 100. -Turnon.4.CB.Mean: 30. 0. 120. -Turnon.4.CB.Sigma: 2. 
0.01 10 - -Turnon.5.Name: Stage2_Barrel_vs_Pt_34GeV -Turnon.5.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.5.Tree: TagAndProbe -Turnon.5.XVar: tauPt -Turnon.5.Cut: hasL1_34 -Turnon.5.SelectionVars: tauEta -Turnon.5.Selection: abs(tauEta)<1.305 -Turnon.5.WeightVar: bkgSubW -Turnon.5.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.5.FitRange:0 500 -Turnon.5.CB.Max: 1. 0.9 1. -Turnon.5.CB.Alpha: 3. 0.01 50. -Turnon.5.CB.N: 10. 1.001 100. -Turnon.5.CB.Mean: 30. 0. 120. -Turnon.5.CB.Sigma: 2. 0.01 10 - -Turnon.6.Name: Stage2_Barrel_vs_Pt_34GeV_iso -Turnon.6.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.6.Tree: TagAndProbe -Turnon.6.XVar: tauPt -Turnon.6.Cut: hasL1_34_iso -Turnon.6.SelectionVars: tauEta -Turnon.6.Selection: abs(tauEta)<1.305 -Turnon.6.WeightVar: bkgSubW -Turnon.6.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.6.FitRange:0 500 -Turnon.6.CB.Max: 1. 0.9 1. -Turnon.6.CB.Alpha: 3. 0.01 50. -Turnon.6.CB.N: 10. 1.001 100. -Turnon.6.CB.Mean: 30. 0. 120. -Turnon.6.CB.Sigma: 2. 0.01 10 - - - -## Stage-2 Endcaps -Turnon.7.Name: Stage2_Endcaps_vs_Pt_26GeV -Turnon.7.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.7.Tree: TagAndProbe -Turnon.7.XVar: tauPt -Turnon.7.Cut: hasL1_26 -Turnon.7.SelectionVars: tauEta -Turnon.7.Selection: abs(tauEta)>1.479 -Turnon.7.WeightVar: bkgSubW -Turnon.7.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.7.FitRange:0 500 -Turnon.7.CB.Max: 1. 0.9 1. -Turnon.7.CB.Alpha: 3. 0.01 50. -Turnon.7.CB.N: 10. 1.001 100. -Turnon.7.CB.Mean: 30. 0. 120. -Turnon.7.CB.Sigma: 2. 0.01 10 - -Turnon.8.Name: Stage2_Endcaps_vs_Pt_26GeV_iso -Turnon.8.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.8.Tree: TagAndProbe -Turnon.8.XVar: tauPt -Turnon.8.Cut: hasL1_26_iso -Turnon.8.SelectionVars: tauEta -Turnon.8.Selection: abs(tauEta)>1.479 -Turnon.8.WeightVar: bkgSubW -Turnon.8.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.8.FitRange:0 500 -Turnon.8.CB.Max: 1. 0.9 1. -Turnon.8.CB.Alpha: 3. 0.01 50. -Turnon.8.CB.N: 10. 1.001 100. -Turnon.8.CB.Mean: 30. 0. 120. -Turnon.8.CB.Sigma: 2. 0.01 10 - -Turnon.9.Name: Stage2_Endcaps_vs_Pt_30GeV -Turnon.9.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.9.Tree: TagAndProbe -Turnon.9.XVar: tauPt -Turnon.9.Cut: hasL1_30 -Turnon.9.SelectionVars: tauEta -Turnon.9.Selection: abs(tauEta)>1.479 -Turnon.9.WeightVar: bkgSubW -Turnon.9.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.9.FitRange:0 500 -Turnon.9.CB.Max: 1. 0.9 1. -Turnon.9.CB.Alpha: 3. 0.01 50. -Turnon.9.CB.N: 10. 1.001 100. -Turnon.9.CB.Mean: 30. 0. 120. -Turnon.9.CB.Sigma: 2. 
0.01 10 - -Turnon.10.Name: Stage2_Endcaps_vs_Pt_30GeV_iso -Turnon.10.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.10.Tree: TagAndProbe -Turnon.10.XVar: tauPt -Turnon.10.Cut: hasL1_30_iso -Turnon.10.SelectionVars: tauEta -Turnon.10.Selection: abs(tauEta)>1.479 -Turnon.10.WeightVar: bkgSubW -Turnon.10.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.10.FitRange:0 500 -Turnon.10.CB.Max: 1. 0.9 1. -Turnon.10.CB.Alpha: 3. 0.01 50. -Turnon.10.CB.N: 10. 1.001 100. -Turnon.10.CB.Mean: 30. 0. 120. -Turnon.10.CB.Sigma: 2. 0.01 10 - -Turnon.11.Name: Stage2_Endcaps_vs_Pt_34GeV -Turnon.11.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.11.Tree: TagAndProbe -Turnon.11.XVar: tauPt -Turnon.11.Cut: hasL1_34 -Turnon.11.SelectionVars: tauEta -Turnon.11.Selection: abs(tauEta)>1.479 -Turnon.11.WeightVar: bkgSubW -Turnon.11.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.11.FitRange:0 500 -Turnon.11.CB.Max: 1. 0.9 1. -Turnon.11.CB.Alpha: 3. 0.01 50. -Turnon.11.CB.N: 10. 1.001 100. -Turnon.11.CB.Mean: 30. 0. 120. -Turnon.11.CB.Sigma: 2. 0.01 10 - -Turnon.12.Name: Stage2_Endcaps_vs_Pt_34GeV_iso -Turnon.12.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.12.Tree: TagAndProbe -Turnon.12.XVar: tauPt -Turnon.12.Cut: hasL1_34_iso -Turnon.12.SelectionVars: tauEta -Turnon.12.Selection: abs(tauEta)>1.479 -Turnon.12.WeightVar: bkgSubW -Turnon.12.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.12.FitRange:0 500 -Turnon.12.CB.Max: 1. 0.9 1. -Turnon.12.CB.Alpha: 3. 0.01 50. -Turnon.12.CB.N: 10. 1.001 100. -Turnon.12.CB.Mean: 30. 0. 120. -Turnon.12.CB.Sigma: 2. 0.01 10 - - - -## Stage-2 All -Turnon.13.Name: Stage2_All_vs_Pt_26GeV -Turnon.13.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.13.Tree: TagAndProbe -Turnon.13.XVar: tauPt -Turnon.13.Cut: hasL1_26 -Turnon.13.WeightVar: bkgSubW -Turnon.13.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.13.FitRange:0 500 -Turnon.13.CB.Max: 1. 0.9 1. -Turnon.13.CB.Alpha: 3. 0.01 50. -Turnon.13.CB.N: 10. 1.001 100. -Turnon.13.CB.Mean: 30. 0. 120. -Turnon.13.CB.Sigma: 2. 0.01 10 - -Turnon.14.Name: Stage2_All_vs_Pt_26GeV_iso -Turnon.14.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.14.Tree: TagAndProbe -Turnon.14.XVar: tauPt -Turnon.14.Cut: hasL1_26_iso -Turnon.14.WeightVar: bkgSubW -Turnon.14.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.14.FitRange:0 500 -Turnon.14.CB.Max: 1. 0.9 1. -Turnon.14.CB.Alpha: 3. 0.01 50. -Turnon.14.CB.N: 10. 1.001 100. -Turnon.14.CB.Mean: 30. 0. 120. -Turnon.14.CB.Sigma: 2. 
0.01 10 - -Turnon.15.Name: Stage2_All_vs_Pt_30GeV -Turnon.15.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.15.Tree: TagAndProbe -Turnon.15.XVar: tauPt -Turnon.15.Cut: hasL1_30 -Turnon.15.WeightVar: bkgSubW -Turnon.15.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.15.FitRange:0 500 -Turnon.15.CB.Max: 1. 0.9 1. -Turnon.15.CB.Alpha: 3. 0.01 50. -Turnon.15.CB.N: 10. 1.001 100. -Turnon.15.CB.Mean: 30. 0. 120. -Turnon.15.CB.Sigma: 2. 0.01 10 - -Turnon.16.Name: Stage2_All_vs_Pt_30GeV_iso -Turnon.16.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.16.Tree: TagAndProbe -Turnon.16.XVar: tauPt -Turnon.16.Cut: hasL1_30_iso -Turnon.16.WeightVar: bkgSubW -Turnon.16.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.16.FitRange:0 500 -Turnon.16.CB.Max: 1. 0.9 1. -Turnon.16.CB.Alpha: 3. 0.01 50. -Turnon.16.CB.N: 10. 1.001 100. -Turnon.16.CB.Mean: 30. 0. 120. -Turnon.16.CB.Sigma: 2. 0.01 10 - -Turnon.17.Name: Stage2_All_vs_Pt_34GeV -Turnon.17.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.17.Tree: TagAndProbe -Turnon.17.XVar: tauPt -Turnon.17.Cut: hasL1_34 -Turnon.17.WeightVar: bkgSubW -Turnon.17.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.17.FitRange:0 500 -Turnon.17.CB.Max: 1. 0.9 1. -Turnon.17.CB.Alpha: 3. 0.01 50. -Turnon.17.CB.N: 10. 1.001 100. -Turnon.17.CB.Mean: 30. 0. 120. -Turnon.17.CB.Sigma: 2. 0.01 10 - -Turnon.18.Name: Stage2_All_vs_Pt_34GeV_iso -Turnon.18.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.18.Tree: TagAndProbe -Turnon.18.XVar: tauPt -Turnon.18.Cut: hasL1_34_iso -Turnon.18.WeightVar: bkgSubW -Turnon.18.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.18.FitRange:0 500 -Turnon.18.CB.Max: 1. 0.9 1. -Turnon.18.CB.Alpha: 3. 0.01 50. -Turnon.18.CB.N: 10. 1.001 100. -Turnon.18.CB.Mean: 30. 0. 120. -Turnon.18.CB.Sigma: 2. 0.01 10 - -## HLT PATHS - -## Stage-2 Barrel -Turnon.19.Name: Stage2_Barrel_vs_HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v -Turnon.19.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.19.Tree: TagAndProbe -Turnon.19.XVar: tauPt -Turnon.19.Cut: hasHLTPath_0 -Turnon.19.SelectionVars: tauEta -Turnon.19.Selection: abs(tauEta)<1.305 -Turnon.19.WeightVar: bkgSubW -Turnon.19.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.19.FitRange:0 500 -Turnon.19.CB.Max: 1. 0.9 1. -Turnon.19.CB.Alpha: 3. 0.01 50. -Turnon.19.CB.N: 10. 1.001 100. -Turnon.19.CB.Mean: 30. 0. 120. -Turnon.19.CB.Sigma: 2. 
0.01 10 - -Turnon.20.Name: Stage2_Barrel_vs_HLT_IsoMu17_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.20.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.20.Tree: TagAndProbe -Turnon.20.XVar: tauPt -Turnon.20.Cut: hasHLTPath_1 -Turnon.20.SelectionVars: tauEta -Turnon.20.Selection: abs(tauEta)<1.305 -Turnon.20.WeightVar: bkgSubW -Turnon.20.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.20.FitRange:0 500 -Turnon.20.CB.Max: 1. 0.9 1. -Turnon.20.CB.Alpha: 3. 0.01 50. -Turnon.20.CB.N: 10. 1.001 100. -Turnon.20.CB.Mean: 30. 0. 120. -Turnon.20.CB.Sigma: 2. 0.01 10 - -Turnon.21.Name: Stage2_Barrel_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v -Turnon.21.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.21.Tree: TagAndProbe -Turnon.21.XVar: tauPt -Turnon.21.Cut: hasHLTPath_2 -Turnon.21.SelectionVars: tauEta -Turnon.21.Selection: abs(tauEta)<1.305 -Turnon.21.WeightVar: bkgSubW -Turnon.21.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.21.FitRange:0 500 -Turnon.21.CB.Max: 1. 0.9 1. -Turnon.21.CB.Alpha: 3. 0.01 50. -Turnon.21.CB.N: 10. 1.001 100. -Turnon.21.CB.Mean: 30. 0. 120. -Turnon.21.CB.Sigma: 2. 0.01 10 - -Turnon.22.Name: Stage2_Barrel_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.22.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.22.Tree: TagAndProbe -Turnon.22.XVar: tauPt -Turnon.22.Cut: hasHLTPath_3 -Turnon.22.SelectionVars: tauEta -Turnon.22.Selection: abs(tauEta)<1.305 -Turnon.22.WeightVar: bkgSubW -Turnon.22.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.22.FitRange:0 500 -Turnon.22.CB.Max: 1. 0.9 1. -Turnon.22.CB.Alpha: 3. 0.01 50. -Turnon.22.CB.N: 10. 1.001 100. -Turnon.22.CB.Mean: 30. 0. 120. -Turnon.22.CB.Sigma: 2. 0.01 10 - -Turnon.23.Name: Stage2_Barrel_vs_HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.23.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.23.Tree: TagAndProbe -Turnon.23.XVar: tauPt -Turnon.23.Cut: hasHLTPath_4 -Turnon.23.SelectionVars: tauEta -Turnon.23.Selection: abs(tauEta)<1.305 -Turnon.23.WeightVar: bkgSubW -Turnon.23.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.23.FitRange:0 500 -Turnon.23.CB.Max: 1. 0.9 1. -Turnon.23.CB.Alpha: 3. 0.01 50. -Turnon.23.CB.N: 10. 1.001 100. -Turnon.23.CB.Mean: 30. 0. 120. -Turnon.23.CB.Sigma: 2. 0.01 10 - -Turnon.24.Name: Stage2_Barrel_vs_HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v -Turnon.24.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.24.Tree: TagAndProbe -Turnon.24.XVar: tauPt -Turnon.24.Cut: hasHLTPath_5 -Turnon.24.SelectionVars: tauEta -Turnon.24.Selection: abs(tauEta)<1.305 -Turnon.24.WeightVar: bkgSubW -Turnon.24.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.24.FitRange:0 500 -Turnon.24.CB.Max: 1. 0.9 1. -Turnon.24.CB.Alpha: 3. 0.01 50. 
-Turnon.24.CB.N: 10. 1.001 100. -Turnon.24.CB.Mean: 30. 0. 120. -Turnon.24.CB.Sigma: 2. 0.01 10 - - - -## Stage-2 Endcaps -Turnon.25.Name: Stage2_Endcaps_vs_HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v -Turnon.25.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.25.Tree: TagAndProbe -Turnon.25.XVar: tauPt -Turnon.25.Cut: hasHLTPath_0 -Turnon.25.SelectionVars: tauEta -Turnon.25.Selection: abs(tauEta)>1.479 -Turnon.25.WeightVar: bkgSubW -Turnon.25.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.25.FitRange:0 500 -Turnon.25.CB.Max: 1. 0.9 1. -Turnon.25.CB.Alpha: 3. 0.01 50. -Turnon.25.CB.N: 10. 1.001 100. -Turnon.25.CB.Mean: 30. 0. 120. -Turnon.25.CB.Sigma: 2. 0.01 10 - -Turnon.26.Name: Stage2_Endcaps_vs_HLT_IsoMu17_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.26.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.26.Tree: TagAndProbe -Turnon.26.XVar: tauPt -Turnon.26.Cut: hasHLTPath_1 -Turnon.26.SelectionVars: tauEta -Turnon.26.Selection: abs(tauEta)>1.479 -Turnon.26.WeightVar: bkgSubW -Turnon.26.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.26.FitRange:0 500 -Turnon.26.CB.Max: 1. 0.9 1. -Turnon.26.CB.Alpha: 3. 0.01 50. -Turnon.26.CB.N: 10. 1.001 100. -Turnon.26.CB.Mean: 30. 0. 120. -Turnon.26.CB.Sigma: 2. 0.01 10 - -Turnon.27.Name: Stage2_Endcaps_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v -Turnon.27.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.27.Tree: TagAndProbe -Turnon.27.XVar: tauPt -Turnon.27.Cut: hasHLTPath_2 -Turnon.27.SelectionVars: tauEta -Turnon.27.Selection: abs(tauEta)>1.479 -Turnon.27.WeightVar: bkgSubW -Turnon.27.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.27.FitRange:0 500 -Turnon.27.CB.Max: 1. 0.9 1. -Turnon.27.CB.Alpha: 3. 0.01 50. -Turnon.27.CB.N: 10. 1.001 100. -Turnon.27.CB.Mean: 30. 0. 120. -Turnon.27.CB.Sigma: 2. 0.01 10 - -Turnon.28.Name: Stage2_Endcaps_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.28.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.28.Tree: TagAndProbe -Turnon.28.XVar: tauPt -Turnon.28.Cut: hasHLTPath_3 -Turnon.28.SelectionVars: tauEta -Turnon.28.Selection: abs(tauEta)>1.479 -Turnon.28.WeightVar: bkgSubW -Turnon.28.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.28.FitRange:0 500 -Turnon.28.CB.Max: 1. 0.9 1. -Turnon.28.CB.Alpha: 3. 0.01 50. -Turnon.28.CB.N: 10. 1.001 100. -Turnon.28.CB.Mean: 30. 0. 120. -Turnon.28.CB.Sigma: 2. 
0.01 10 - -Turnon.29.Name: Stage2_Endcaps_vs_HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.29.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.29.Tree: TagAndProbe -Turnon.29.XVar: tauPt -Turnon.29.Cut: hasHLTPath_4 -Turnon.29.SelectionVars: tauEta -Turnon.29.Selection: abs(tauEta)>1.479 -Turnon.29.WeightVar: bkgSubW -Turnon.29.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.29.FitRange:0 500 -Turnon.29.CB.Max: 1. 0.9 1. -Turnon.29.CB.Alpha: 3. 0.01 50. -Turnon.29.CB.N: 10. 1.001 100. -Turnon.29.CB.Mean: 30. 0. 120. -Turnon.29.CB.Sigma: 2. 0.01 10 - -Turnon.30.Name: Stage2_Endcaps_vs_HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v -Turnon.30.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.30.Tree: TagAndProbe -Turnon.30.XVar: tauPt -Turnon.30.Cut: hasHLTPath_5 -Turnon.30.SelectionVars: tauEta -Turnon.30.Selection: abs(tauEta)>1.479 -Turnon.30.WeightVar: bkgSubW -Turnon.30.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.30.FitRange:0 500 -Turnon.30.CB.Max: 1. 0.9 1. -Turnon.30.CB.Alpha: 3. 0.01 50. -Turnon.30.CB.N: 10. 1.001 100. -Turnon.30.CB.Mean: 30. 0. 120. -Turnon.30.CB.Sigma: 2. 0.01 10 - - - -## Stage-2 All -Turnon.31.Name: Stage2_All_vs_HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v -Turnon.31.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.31.Tree: TagAndProbe -Turnon.31.XVar: tauPt -Turnon.31.Cut: hasHLTPath_0 -Turnon.31.WeightVar: bkgSubW -Turnon.31.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.31.FitRange:0 500 -Turnon.31.CB.Max: 1. 0.9 1. -Turnon.31.CB.Alpha: 3. 0.01 50. -Turnon.31.CB.N: 10. 1.001 100. -Turnon.31.CB.Mean: 30. 0. 120. -Turnon.31.CB.Sigma: 2. 0.01 10 - -Turnon.32.Name: Stage2_All_vs_HLT_IsoMu17_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.32.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.32.Tree: TagAndProbe -Turnon.32.XVar: tauPt -Turnon.32.Cut: hasHLTPath_1 -Turnon.32.WeightVar: bkgSubW -Turnon.32.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.32.FitRange:0 500 -Turnon.32.CB.Max: 1. 0.9 1. -Turnon.32.CB.Alpha: 3. 0.01 50. -Turnon.32.CB.N: 10. 1.001 100. -Turnon.32.CB.Mean: 30. 0. 120. -Turnon.32.CB.Sigma: 2. 0.01 10 - -Turnon.33.Name: Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v -Turnon.33.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.33.Tree: TagAndProbe -Turnon.33.XVar: tauPt -Turnon.33.Cut: hasHLTPath_2 -Turnon.33.WeightVar: bkgSubW -Turnon.33.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.33.FitRange:0 500 -Turnon.33.CB.Max: 1. 0.9 1. -Turnon.33.CB.Alpha: 3. 0.01 50. -Turnon.33.CB.N: 10. 1.001 100. -Turnon.33.CB.Mean: 30. 0. 120. -Turnon.33.CB.Sigma: 2. 
0.01 10 - -Turnon.34.Name: Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.34.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.34.Tree: TagAndProbe -Turnon.34.XVar: tauPt -Turnon.34.Cut: hasHLTPath_3 -Turnon.34.WeightVar: bkgSubW -Turnon.34.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.34.FitRange:0 500 -Turnon.34.CB.Max: 1. 0.9 1. -Turnon.34.CB.Alpha: 3. 0.01 50. -Turnon.34.CB.N: 10. 1.001 100. -Turnon.34.CB.Mean: 30. 0. 120. -Turnon.34.CB.Sigma: 2. 0.01 10 - -Turnon.35.Name: Stage2_All_vs_HLT_IsoMu21_eta2p1_LooseIsoPFTau20_SingleL1_v -Turnon.35.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.35.Tree: TagAndProbe -Turnon.35.XVar: tauPt -Turnon.35.Cut: hasHLTPath_4 -Turnon.35.WeightVar: bkgSubW -Turnon.35.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.35.FitRange:0 500 -Turnon.35.CB.Max: 1. 0.9 1. -Turnon.35.CB.Alpha: 3. 0.01 50. -Turnon.35.CB.N: 10. 1.001 100. -Turnon.35.CB.Mean: 30. 0. 120. -Turnon.35.CB.Sigma: 2. 0.01 10 - -Turnon.36.Name: Stage2_All_vs_HLT_IsoMu19_eta2p1_MediumIsoPFTau32_Trk1_eta2p1_Reg_v -Turnon.36.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.36.Tree: TagAndProbe -Turnon.36.XVar: tauPt -Turnon.36.Cut: hasHLTPath_5 -Turnon.36.WeightVar: bkgSubW -Turnon.36.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.36.FitRange:0 500 -Turnon.36.CB.Max: 1. 0.9 1. -Turnon.36.CB.Alpha: 3. 0.01 50. -Turnon.36.CB.N: 10. 1.001 100. -Turnon.36.CB.Mean: 30. 0. 120. -Turnon.36.CB.Sigma: 2. 0.01 10 diff --git a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_bkgSub.par b/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_bkgSub.par deleted file mode 100644 index 29edb43a0f7..00000000000 --- a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_bkgSub.par +++ /dev/null @@ -1,23 +0,0 @@ -#OutputFile: results/TestBkgSub/fitOutput_MC.root -OutputFile: results/TestBkgSub/fitOutput.root -NCPU: 4 - -Turnon.N: 1 - - -Turnon.1.Name: Stage2_Barrel_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau20_v -#Turnon.1.File: /home/llr/cms/cadamuro/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/DY_MC_reHLT_7Set2016_forFit.root -Turnon.1.File: /home/llr/cms/cadamuro/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/allICHEPIsoOrder_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasHLTPath_2 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 -Turnon.1.FitRange: 0 140 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 3. 0.01 50. -Turnon.1.CB.N: 10. 1.001 100. -Turnon.1.CB.Mean: 30. 0. 120. -Turnon.1.CB.Sigma: 2. 
0.01 10 - - diff --git a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_hasL120.par b/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_hasL120.par deleted file mode 100644 index 357f36a32bb..00000000000 --- a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_hasL120.par +++ /dev/null @@ -1,96 +0,0 @@ -OutputFile: results/TestTurnOn/fitOutput_L1_20GeV.root -NCPU: 4 - -Turnon.N: 6 - -Turnon.1.Name: Stage2_Barrel_vs_Pt_20GeV -Turnon.1.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: hasL1_20 -Turnon.1.SelectionVars: tauEta -Turnon.1.Selection: abs(tauEta)<1.305 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 3. 0.01 50. -Turnon.1.CB.N: 10. 1.001 100. -Turnon.1.CB.Mean: 30. 0. 120. -Turnon.1.CB.Sigma: 2. 0.01 10 - -Turnon.2.Name: Stage2_Barrel_vs_Pt_20GeV_iso -Turnon.2.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: hasL1_20_iso -Turnon.2.SelectionVars: tauEta -Turnon.2.Selection: abs(tauEta)<1.305 -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.2.FitRange:0 500 -Turnon.2.CB.Max: 1. 0.9 1. -Turnon.2.CB.Alpha: 3. 0.01 50. -Turnon.2.CB.N: 10. 1.001 100. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 2. 0.01 10 - -Turnon.3.Name: Stage2_Endcaps_vs_Pt_20GeV -Turnon.3.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: hasL1_20 -Turnon.3.SelectionVars: tauEta -Turnon.3.Selection: abs(tauEta)>1.479 -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.3.FitRange:0 500 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 3. 0.01 50. -Turnon.3.CB.N: 10. 1.001 100. -Turnon.3.CB.Mean: 30. 0. 120. -Turnon.3.CB.Sigma: 2. 0.01 10 - -Turnon.4.Name: Stage2_Endcaps_vs_Pt_20GeV_iso -Turnon.4.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.4.Tree: TagAndProbe -Turnon.4.XVar: tauPt -Turnon.4.Cut: hasL1_20_iso -Turnon.4.SelectionVars: tauEta -Turnon.4.Selection: abs(tauEta)>1.479 -Turnon.4.WeightVar: bkgSubW -Turnon.4.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.4.FitRange:0 500 -Turnon.4.CB.Max: 1. 0.9 1. -Turnon.4.CB.Alpha: 3. 0.01 50. -Turnon.4.CB.N: 10. 1.001 100. -Turnon.4.CB.Mean: 30. 0. 120. -Turnon.4.CB.Sigma: 2. 0.01 10 - -Turnon.5.Name: Stage2_All_vs_Pt_20GeV -Turnon.5.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.5.Tree: TagAndProbe -Turnon.5.XVar: tauPt -Turnon.5.Cut: hasL1_20 -Turnon.5.WeightVar: bkgSubW -Turnon.5.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.5.FitRange:0 500 -Turnon.5.CB.Max: 1. 
0.9 1. -Turnon.5.CB.Alpha: 3. 0.01 50. -Turnon.5.CB.N: 10. 1.001 100. -Turnon.5.CB.Mean: 30. 0. 120. -Turnon.5.CB.Sigma: 2. 0.01 10 - -Turnon.6.Name: Stage2_All_vs_Pt_20GeV_iso -Turnon.6.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.6.Tree: TagAndProbe -Turnon.6.XVar: tauPt -Turnon.6.Cut: hasL1_20_iso -Turnon.6.WeightVar: bkgSubW -Turnon.6.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.6.FitRange:0 500 -Turnon.6.CB.Max: 1. 0.9 1. -Turnon.6.CB.Alpha: 3. 0.01 50. -Turnon.6.CB.N: 10. 1.001 100. -Turnon.6.CB.Mean: 30. 0. 120. -Turnon.6.CB.Sigma: 2. 0.01 10 diff --git a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_isoHLT.par b/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_isoHLT.par deleted file mode 100644 index 4e64d594cf0..00000000000 --- a/TauTagAndProbe/test/fitter/run/stage2_turnOn_fitter_isoHLT.par +++ /dev/null @@ -1,50 +0,0 @@ -OutputFile: results/TestTurnOn/fitOutput_LooseIsoPFTau32.root -NCPU: 4 - -Turnon.N: 3 - -Turnon.1.Name: Stage2_Barrel_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau32_Trk1_eta2p1_Reg_v -Turnon.1.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.1.Tree: TagAndProbe -Turnon.1.XVar: tauPt -Turnon.1.Cut: isoHLT -Turnon.1.SelectionVars: tauEta -Turnon.1.Selection: abs(tauEta)<1.305 -Turnon.1.WeightVar: bkgSubW -Turnon.1.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.1.FitRange:0 500 -Turnon.1.CB.Max: 1. 0.9 1. -Turnon.1.CB.Alpha: 3. 0.01 50. -Turnon.1.CB.N: 10. 1.001 100. -Turnon.1.CB.Mean: 30. 0. 120. -Turnon.1.CB.Sigma: 2. 0.01 10 - -Turnon.2.Name: Stage2_Endcaps_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau32_Trk1_eta2p1_Reg_v -Turnon.2.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.2.Tree: TagAndProbe -Turnon.2.XVar: tauPt -Turnon.2.Cut: isoHLT -Turnon.2.SelectionVars: tauEta -Turnon.2.Selection: abs(tauEta)>1.479 -Turnon.2.WeightVar: bkgSubW -Turnon.2.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.2.FitRange:0 500 -Turnon.2.CB.Max: 1. 0.9 1. -Turnon.2.CB.Alpha: 3. 0.01 50. -Turnon.2.CB.N: 10. 1.001 100. -Turnon.2.CB.Mean: 30. 0. 120. -Turnon.2.CB.Sigma: 2. 0.01 10 - -Turnon.3.Name: Stage2_All_vs_HLT_IsoMu19_eta2p1_LooseIsoPFTau32_Trk1_eta2p1_Reg_v -Turnon.3.File: /afs/cern.ch/user/s/sbologna/TauTagAndProbe/CMSSW_8_0_5/src/TauTagAndProbe/TauTagAndProbe/test/fitter/NTuple_Merge_10Ago_MaxIso_FixRiccardo_forFit.root -Turnon.3.Tree: TagAndProbe -Turnon.3.XVar: tauPt -Turnon.3.Cut: isoHLT -Turnon.3.WeightVar: bkgSubW -Turnon.3.Binning: 4 8 10 12 14 16 18 19 20 21 22 23 24 26 28 30 32 35 40 45 50 60 70 90 110 140 200 500 -Turnon.3.FitRange:0 500 -Turnon.3.CB.Max: 1. 0.9 1. -Turnon.3.CB.Alpha: 3. 0.01 50. -Turnon.3.CB.N: 10. 1.001 100. -Turnon.3.CB.Mean: 30. 0. 120. -Turnon.3.CB.Sigma: 2. 
0.01 10 diff --git a/TauTagAndProbe/test/fitter/setup.sh b/TauTagAndProbe/test/fitter/setup.sh deleted file mode 100644 index bf949dfb685..00000000000 --- a/TauTagAndProbe/test/fitter/setup.sh +++ /dev/null @@ -1 +0,0 @@ -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./obj diff --git a/TauTagAndProbe/test/fitter/src/TurnonFit.cpp b/TauTagAndProbe/test/fitter/src/TurnonFit.cpp deleted file mode 100644 index cde363de4de..00000000000 --- a/TauTagAndProbe/test/fitter/src/TurnonFit.cpp +++ /dev/null @@ -1,329 +0,0 @@ -/** - * @file TurnonFit.cpp - * @brief - * - * - * @author Jean-Baptiste Sauvan - * - * @date 05/10/2014 - * - * @internal - * Created : 05/10/2014 - * Last update : 05/10/2014 21:03:01 - * by : JB Sauvan - * - * ===================================================================================== - */ - - -#include "TurnonFit.h" - -#include -#include // std::filebuf - -#include "TCanvas.h" -#include "TH1.h" -#include "TAxis.h" -#include "TROOT.h" - -#include "RooCategory.h" -#include "RooEfficiency.h" -#include "RooDataSet.h" -#include "RooBinning.h" - -using namespace std; -using namespace RooFit; - - -/*****************************************************************/ -TurnonFit::TurnonFit(const std::string& name):m_name(name), - m_xVar ("xVar", "p_{T}", 20., 0., 150.), - m_max ("max", "max", 1.0, 0.9, 1.), - m_alpha ("alpha", "#alpha", 3., 0.01, 50.), - m_n ("n", "n", 10., 1.001, 50.), - m_mean ("mean", "mean", 20., 0., 50.), - m_sigma ("sigma", "#sigma", 2., 0.01, 10.), - m_mturn ("mturn", "mturn", 20., 10., 50.), - m_p ("p", "p", 0.8, 0.4, 1.), - m_width ("width", "width", 10., 1., 50.), - m_yrise ("yrise", "yrise", 0.9, 0.1, 1.0) -/*****************************************************************/ -{ - stringstream sxvar, smax, salpha, sn, smean, ssigma, smturn, sp, swidth, syrise; - sxvar << "xVar_" << m_name; - smax << "max_" << m_name; - salpha << "alpha_" << m_name; - sn << "n_" << m_name; - smean << "mean_" << m_name; - ssigma << "sigma_" << m_name; - syrise << "yrise_" << m_name; - m_xVar .SetName(sxvar.str().c_str()); - m_max .SetName(smax.str().c_str()); - m_alpha.SetName(salpha.str().c_str()); - m_n .SetName(sn.str().c_str()); - m_mean .SetName(smean.str().c_str()); - m_sigma.SetName(ssigma.str().c_str()); - m_mturn.SetName(smturn.str().c_str()); - m_p .SetName(sp.str().c_str()); - m_width.SetName(swidth.str().c_str()); - m_yrise.SetName(syrise.str().c_str()); -} - - -/*****************************************************************/ -TurnonFit::~TurnonFit() -/*****************************************************************/ -{ - //m_function->Delete(); - //m_fitResult->Delete(); - //m_plot->Delete(); -} - - - -/*****************************************************************/ -/*void TurnonFit::setCrystalBall(double max, double max0, double max1, - double alpha, double alpha0, double alpha1, - double n, double n0, double n1, - double mean, double mean0, double mean1, - double sigma, double sigma0, double sigma1, - double mturn, double mturn0, double mturn1, - double p, double p0, double p1, - double width, double width0, double width1) -*/ -/*****************************************************************/ -/*{ - m_max.setVal(max); - m_max.setRange(max0, max1); - m_alpha.setVal(alpha); - m_alpha.setRange(alpha0, alpha1); - m_n.setVal(n); - m_n.setRange(n0, n1); - m_mean.setVal(mean); - m_mean.setRange(mean0, mean1); - m_sigma.setVal(sigma); - m_sigma.setRange(sigma0, sigma1); - m_mturn.setVal(mturn); - m_mturn.setRange(mturn0, mturn1); - m_p.setVal(p); - 
m_p.setRange(p0,p1); - m_width.setVal(width); - m_width.setRange(width0,width1); - - stringstream cbName; - cbName << "cb_" << m_name; - // This uses function defined in FuncCB class. To make it run, arrange the makefile and compile it! - m_function = new FuncCB(cbName.str().c_str(), cbName.str().c_str(), m_xVar, m_mean, m_sigma, m_alpha, m_n, m_max,m_mturn,m_p,m_width) ; - -} -*/ - -/*****************************************************************/ -void TurnonFit::setCrystalBall(double max, double max0, double max1, - double alpha, double alpha0, double alpha1, - double n, double n0, double n1, - double mean, double mean0, double mean1, - double sigma, double sigma0, double sigma1, - double yrise, double yrise0, double yrise1 - ) -/*****************************************************************/ -{ - m_max.setVal(max); - m_max.setRange(max0, max1); - m_alpha.setVal(alpha); - m_alpha.setRange(alpha0, alpha1); - m_n.setVal(n); - m_n.setRange(n0, n1); - m_mean.setVal(mean); - m_mean.setRange(mean0, mean1); - m_sigma.setVal(sigma); - m_sigma.setRange(sigma0, sigma1); - m_yrise.setVal(yrise); - m_yrise.setRange(yrise0,yrise1); - - stringstream cbName; - cbName << "cb_" << m_name; - - m_function = new FuncCB_cdf(cbName.str().c_str(), cbName.str().c_str(), m_xVar, m_mean, m_sigma, m_alpha, m_n, m_max, m_yrise) ; - -} - - -/*****************************************************************/ -void TurnonFit::fit() -/*****************************************************************/ -{ - printParameters(); - - TFile* file = TFile::Open(m_fileName.c_str()); - TTree* tree = (TTree*)file->Get(m_treeName.c_str()); - - // Define cut used for efficiency calculation - RooCategory cut(m_cut.c_str(), m_cut.c_str()) ; - cut.defineType("accept",1) ; - cut.defineType("reject",0) ; - RooEfficiency eff("eff","efficiency", *m_function, cut, "accept"); - - // create dataset. In case of subset selection, add needed variables in the ArgSet - RooDataSet* dataSet; - RooArgSet argSet(m_xVar, cut); - vector selectionVars; - vector weightVars; - //m_selection = "tauPt>250"; - if(m_selection=="") - { - if (m_weightVar=="")dataSet = new RooDataSet("data", "data", argSet, Import(*tree)); - else - { - weightVars.push_back(RooRealVar(m_weightVar.c_str(), m_weightVar.c_str(), 0.)); - argSet.add(weightVars.back()); - dataSet = new RooDataSet("data", "data", argSet, Import(*tree), WeightVar(m_weightVar.c_str())); - } - } - else - { - for(unsigned i=0;iprintValue(os); - - // Create binned turn-on - int nbins = m_binning.size()-1; - for(UInt_t iBin = 0 ; iBin < m_binning.size() ; ++iBin) cout<cd(); // change current directory. 
Otherwise, m_plot is associated to "file", and file->Close() destroys m_plot - m_plot = m_xVar.frame(Bins(18000),Title("")) ; - stringstream plotName; - plotName << "plot_" << m_name; - m_plot->SetName(plotName.str().c_str()); - dataSet->plotOn(m_plot, DataError(RooAbsData::Poisson), Binning(binning), Efficiency(cut), MarkerColor(kBlack), LineColor(kBlack), MarkerStyle(20)); - - // fit functional form to unbinned dataset - //if(!m_noFit) m_fitResult = eff.fitTo(*dataSet,ConditionalObservables(m_xVar),Minos(kTRUE),Warnings(kFALSE),NumCPU(m_nCPU),Save(kTRUE),Verbose(kFALSE)); - if(!m_noFit) - { - if (m_weightVar==""){ m_fitResult = eff.fitTo(*dataSet,ConditionalObservables(m_xVar),Minos(kFALSE),Warnings(kFALSE),NumCPU(m_nCPU),Save(kTRUE),Verbose(kFALSE),SumW2Error(kTRUE)); - // if (m_weightVar=="") m_fitResult = eff.fitTo(*dataSet,ConditionalObservables(m_xVar),Minos(kFALSE),Warnings(kFALSE),NumCPU(m_nCPU),Save(kTRUE),Verbose(kFALSE)); - } else { m_fitResult = eff.fitTo(*dataSet,ConditionalObservables(m_xVar),Minos(kFALSE),Warnings(kFALSE),NumCPU(m_nCPU),Save(kTRUE),Verbose(kFALSE),SumW2Error(kTRUE)); - } - stringstream resultName; - resultName << "fitResult_" << m_name; - m_fitResult->SetName(resultName.str().c_str()); - } - // m_function->plotOn(m_plot,VisualizeError(*m_fitResult,1),FillColor(kOrange),LineColor(kRed),LineWidth(2)); - //m_function->plotOn(m_plot,LineColor(kRed),LineWidth(2)); - m_function->plotOn(m_plot,VisualizeError(*m_fitResult,2),FillColor(kBlue),LineColor(kRed),LineWidth(2)); - m_function->plotOn(m_plot,VisualizeError(*m_fitResult,1),FillColor(kOrange),LineColor(kRed),LineWidth(2)); - m_function->plotOn(m_plot,LineColor(kRed),LineWidth(2)); - dataSet->plotOn(m_plot, DataError(RooAbsData::Poisson), Binning(binning), Efficiency(cut), MarkerColor(kBlack), LineColor(kBlack), MarkerStyle(20)); - - m_plot->GetYaxis()->SetRangeUser(0,1.05); - m_plot->GetXaxis()->SetRangeUser(m_xVar.getMin(),m_xVar.getMax()); - //cout<<"m_xVar.getMax() = "<getObject(0); - - for (int ipt = 0; ipt < m_histo->GetN(); ++ipt) - { - double x,y; - m_histo->GetPoint(ipt,x,y); - cout<<"x = "<getObject(3); - m_fitError1Sigma = (RooCurve*)m_plot->getObject(1); - m_fitError2Sigma = (RooCurve*)m_plot->getObject(2); - stringstream histoName, fitName, fit1sigErrBandName,fit2sigErrBandName; - histoName << "histo_" << m_name; - fitName << "fit_" << m_name; - fit1sigErrBandName << "fit1sigErrBand_" << m_name; - fit2sigErrBandName << "fit2sigErrBand_" << m_name; - m_histo->SetName(histoName.str().c_str()); - m_fit->SetName(fitName.str().c_str()); - m_fitError1Sigma->SetName(fit1sigErrBandName.str().c_str()); - m_fitError2Sigma->SetName(fit2sigErrBandName.str().c_str()); - - file->Close(); - dataSet->Delete(); - - printParameters(); - -} - - - -/*****************************************************************/ -void TurnonFit::save(TFile* outputFile) -/*****************************************************************/ -{ - outputFile->cd(); - stringstream cName; - cName << "canvas_" << m_name; - TCanvas* canvas = new TCanvas(cName.str().c_str(), cName.str().c_str(), 800, 800); - canvas->SetGrid(); - TH1F* dummy = new TH1F("test","test",300,0.,300.); - dummy->GetXaxis()->SetRangeUser(0.,300.); - dummy->GetXaxis()->SetTitle("p_{T}^{offl.} [GeV]"); - dummy->GetXaxis()->SetTitleOffset(1.3); - dummy->GetYaxis()->SetTitle("L1 Efficiency"); - dummy->GetYaxis()->SetTitleOffset(1.3); - dummy->GetXaxis()->SetMoreLogLabels(); - dummy->SetTitle(""); - dummy->SetStats(0); - dummy->Draw(); - m_plot->Draw("same"); - // 
m_plot->Draw(); - canvas->Write(); - m_histo->Write(); - m_fit->Write(); - m_fitError1Sigma->Write(); - m_fitError2Sigma->Write(); - m_function->Write(); - if(!m_noFit) m_fitResult->Write(); -} - - -/*****************************************************************/ -void TurnonFit::printParameters() -/*****************************************************************/ -{ - cout<<"\n\n"; - cout<<"Turnon "< - * - * @date 06/10/2014 - * - * @internal - * Created : 06/10/2014 - * Last update : 06/10/2014 15:23:14 - * by : JB Sauvan - * - * ===================================================================================== - */ - - - - -#include "TurnonManager.h" -#include "Utilities.h" - - -using namespace std; - - - -/*****************************************************************/ -TurnonManager::TurnonManager() -/*****************************************************************/ -{ -} - - -/*****************************************************************/ -TurnonManager::~TurnonManager() -/*****************************************************************/ -{ - m_outputFile->Close(); - for(auto turnon=m_turnonFits.begin(); turnon!=m_turnonFits.end(); turnon++) - { - delete *turnon; - } -} - - -/*****************************************************************/ -bool TurnonManager::readConfig(const std::string& config) -/*****************************************************************/ -{ - int status = m_params.ReadFile(config.c_str(),EEnvLevel(0)); - if(status!=0) - { - cout<<"FATAL: Cannot read configuration file "< bins = Utilities::stringToVector(binning); - cout<<"bins in TurnonManager = "< fitRangeValues = Utilities::stringToVector(fitRange); - vector cbMaxValues = Utilities::stringToVector(cbMax); - vector cbAlphaValues = Utilities::stringToVector(cbAlpha); - vector cbNValues = Utilities::stringToVector(cbN); - vector cbMeanValues = Utilities::stringToVector(cbMean); - vector cbSigmaValues = Utilities::stringToVector(cbSigma); - vector cbMturnValues = Utilities::stringToVector(cbMturn); - vector cbPValues = Utilities::stringToVector(cbP); - vector cbWidthValues = Utilities::stringToVector(cbWidth); - vector cbYRiseValues = Utilities::stringToVector(cbYRise); - vector selectionVarsList; - Utilities::tokenize(selectionVars, selectionVarsList); - - - - m_turnonFits.push_back(new TurnonFit(name)); - m_turnonFits.back()->setFileName(file); - m_turnonFits.back()->setTreeName(tree); - m_turnonFits.back()->setNCPU(m_nCPU); - m_turnonFits.back()->setNoFit(m_noFit); - m_turnonFits.back()->setXVar(xVar, fitRangeValues[0], fitRangeValues[1]); - m_turnonFits.back()->setCut(cut); - m_turnonFits.back()->setSelectionVars(selectionVarsList); - m_turnonFits.back()->setSelection(selection); - m_turnonFits.back()->setWeightVar(weightVar); - m_turnonFits.back()->setBinning(bins); - /* - m_turnonFits.back()->setCrystalBall(cbMaxValues[0], cbMaxValues[1], cbMaxValues[2], - cbAlphaValues[0], cbAlphaValues[1], cbAlphaValues[2], - cbNValues[0], cbNValues[1], cbNValues[2], - cbMeanValues[0], cbMeanValues[1], cbMeanValues[2], - cbSigmaValues[0], cbSigmaValues[1], cbSigmaValues[2], - cbMturnValues[0],cbMturnValues[1], cbMturnValues[2], - cbPValues[0], cbPValues[1], cbPValues[2], - cbWidthValues[0], cbWidthValues[1], cbWidthValues[2] - ); - */ - m_turnonFits.back()->setCrystalBall(cbMaxValues[0], cbMaxValues[1], cbMaxValues[2], - cbAlphaValues[0], cbAlphaValues[1], cbAlphaValues[2], - cbNValues[0], cbNValues[1], cbNValues[2], - cbMeanValues[0], cbMeanValues[1], cbMeanValues[2], - cbSigmaValues[0], 
cbSigmaValues[1], cbSigmaValues[2], - cbYRiseValues[0], cbYRiseValues[1], cbYRiseValues[2] - ); - } - - return true; - -} - - -/*****************************************************************/ -void TurnonManager::fit() -/*****************************************************************/ -{ - for(auto turnon=m_turnonFits.begin(); turnon!=m_turnonFits.end(); turnon++) - { - (*turnon)->fit(); - (*turnon)->save(m_outputFile); - } -} - - - diff --git a/TauTagAndProbe/test/fitter/src/Utilities.cpp b/TauTagAndProbe/test/fitter/src/Utilities.cpp deleted file mode 100644 index 9ebd301fa27..00000000000 --- a/TauTagAndProbe/test/fitter/src/Utilities.cpp +++ /dev/null @@ -1,98 +0,0 @@ -/** - * @file Utilities.cxx - * @brief - * - * - * @author Jean-Baptiste Sauvan - * - * @date 03/27/2010 - * - * @internal - * Created : 03/27/2010 - * Last update : 03/27/2010 01:00:52 PM - * by : JB Sauvan - * - * ===================================================================================== - */ - - -#include "Utilities.h" - - -//--- STL - -using namespace std; - - - - - -/*****************************************************************/ -void Utilities::tokenize(const string& str, - vector& tokens, - const string& delimiter) -/*****************************************************************/ -{ - string::size_type length = delimiter.size(); - string::size_type lastPos = 0; - string::size_type pos = str.find(delimiter, 0); - - - while (string::npos != pos) - { - // Found a token, add it to the vector. - if(str.substr(lastPos, pos - lastPos).size()>0) - tokens.push_back(str.substr(lastPos, pos - lastPos)); - lastPos = pos + length; - // Find next "non-delimiter" - pos = str.find(delimiter, lastPos); - } - if(str.substr(lastPos).size()>0) - tokens.push_back(str.substr(lastPos)); -} - - - -/*****************************************************************/ -string Utilities::intToString(int n) -/*****************************************************************/ -{ - ostringstream oss; - oss << n; - return oss.str(); -} - - - -/*****************************************************************/ -void Utilities::findAndReplace(string& sInput, string sFind, string sReplace ) -/*****************************************************************/ -{ - size_t itPos = 0; - size_t itFindLen = sFind.length(); - size_t itReplaceLen = sReplace.length(); - - if( itFindLen == 0 ) - return; - - while( (itPos = sInput.find( sFind, itPos )) != std::string::npos ) - { - sInput.replace( itPos, itFindLen, sReplace ); - itPos += itReplaceLen; - } - -} - - -/*****************************************************************/ -void Utilities::strip(std::string& sInput) -/*****************************************************************/ -{ - //-- removing blanks at the beginning and at the end - while(*sInput.begin()==' ' || *sInput.begin()=='\t') sInput.erase(sInput.begin()); - while(*(sInput.end()-1)==' ' || *(sInput.end()-1)=='\t') sInput.erase(sInput.end()-1); -} - - - - diff --git a/TauTagAndProbe/test/fitter/src/main.cpp b/TauTagAndProbe/test/fitter/src/main.cpp deleted file mode 100644 index e2aa1252134..00000000000 --- a/TauTagAndProbe/test/fitter/src/main.cpp +++ /dev/null @@ -1,26 +0,0 @@ - - -#include -#include - -#include "TurnonManager.h" - -using namespace std; - - -int main(int argc, char** argv) -{ - if(argc!=2) - { - cout<<"Usage: fit.exe configFile\n"; - return 1; - } - string config(argv[1]); - - TurnonManager turnons; - bool status = turnons.readConfig(config); - if(!status) return 1; - turnons.fit(); - - return 
0; -} diff --git a/TauTagAndProbe/test/produceTuples.py b/TauTagAndProbe/test/produceTuples.py new file mode 100644 index 00000000000..d12e4d292a2 --- /dev/null +++ b/TauTagAndProbe/test/produceTuples.py @@ -0,0 +1,215 @@ +import os +import re +import FWCore.ParameterSet.Config as cms +from FWCore.ParameterSet.VarParsing import VarParsing +from TauTriggerTools.Common.ProduceHelpers import * + +options = VarParsing('analysis') +options.register('inputFileList', '', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "Text file with a list of the input root files to process.") +options.register('fileNamePrefix', '', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "Prefix to add to input file names. Use file: for the files in the local file system.") +options.register('outputTupleFile', 'eventTuple.root', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "Event tuple file.") +options.register('skipEvents', -1, VarParsing.multiplicity.singleton, VarParsing.varType.int, + "Number of events to skip") +options.register('eventList', '', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "List of events to process.") +options.register('lumiFile', '', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "JSON file with lumi mask.") +options.register('period', 'Run2018', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "Data taking period") +options.register('triggerProcess', 'HLT', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "Trigger process") +options.register('metFiltersProcess', '', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "Process for MET filters. If empty, it will be deduced based on the period.") +options.register('globalTag', '', VarParsing.multiplicity.singleton, VarParsing.varType.string, + "Global tag. 
If empty, it will be deduced based on the period.") +options.register('isMC', True, VarParsing.multiplicity.singleton, VarParsing.varType.bool, "MC or Data") +options.register('runDeepTau', True, VarParsing.multiplicity.singleton, VarParsing.varType.bool, "Run DeepTau IDs") +options.register('pureGenMode', False, VarParsing.multiplicity.singleton, VarParsing.varType.bool, + "Don't apply any offline selection or tagging.") +options.register('wantSummary', False, VarParsing.multiplicity.singleton, VarParsing.varType.bool, + "Print run summary at the end of the job.") +options.parseArguments() + +processName = "TagAndProbe" +process = cms.Process(processName) + +process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") +process.load("Configuration.StandardSequences.GeometryRecoDB_cff") + +if len(options.globalTag) == 0: + process.GlobalTag.globaltag = getGlobalTag(options.period, options.isMC) +else: + process.GlobalTag.globaltag = options.globalTag +process.source = cms.Source('PoolSource', fileNames = cms.untracked.vstring()) +process.TFileService = cms.Service('TFileService', fileName=cms.string(options.outputTupleFile)) + +if len(options.inputFileList) > 0: + readFileList(process.source.fileNames, options.inputFileList, options.fileNamePrefix) +elif len(options.inputFiles) > 0: + addFilesToList(process.source.fileNames, options.inputFiles, options.fileNamePrefix) + +process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) ) +if options.maxEvents > 0: + process.maxEvents.input = cms.untracked.int32(options.maxEvents) +if options.skipEvents > 0: + process.source.skipEvents = cms.untracked.uint32(options.skipEvents) +if len(options.eventList) > 0: + process.source.eventsToProcess = cms.untracked.VEventRange(options.eventList.split(',')) +if len(options.lumiFile) > 0: + import FWCore.PythonUtilities.LumiList as LumiList + process.source.lumisToProcess = LumiList.LumiList(filename = options.lumiFile).getVLuminosityBlockRange() + +year = getYear(options.period) + +# Update electron ID according recommendations from https://twiki.cern.ch/twiki/bin/view/CMS/EgammaMiniAODV2 +if options.pureGenMode: + process.egammaPostRecoSeq = cms.Sequence() +else: + from RecoEgamma.EgammaTools.EgammaPostRecoTools import setupEgammaPostRecoSeq + ele_era = { + 2016: '2016-Legacy', + 2017: '2017-Nov17ReReco', + 2018: '2018-Prompt' + } + setupEgammaPostRecoSeq(process, runVID=True, runEnergyCorrections=False, era=ele_era[year]) + +# Update tau IDs according recommendations from https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuidePFTauID +import RecoTauTag.RecoTau.tools.runTauIdMVA as tauIdConfig +updatedTauName = "slimmedTausNewID" +tauIdsToKeep = [ "2017v2" ] +if options.runDeepTau: + tauIdsToKeep.append("deepTau2017v2p1") +tauIdEmbedder = tauIdConfig.TauIDEmbedder(process, cms, debug=False, updatedTauName=updatedTauName, + toKeep=tauIdsToKeep) +tauIdEmbedder.runTauID() +tauSrc_InputTag = cms.InputTag(updatedTauName) + +# Update MET filters according recommendations from https://twiki.cern.ch/twiki/bin/viewauth/CMS/MissingETOptionalFiltersRun2 +# Using post-Moriond2019 (a more complete) list of noisy crystals +process.metFilterSequence = cms.Sequence() +customMetFilters = cms.PSet() +if not options.pureGenMode and year in [ 2017, 2018 ]: + process.load('RecoMET.METFilters.ecalBadCalibFilter_cfi') + baddetEcallist = cms.vuint32([ + 872439604,872422825,872420274,872423218,872423215,872416066,872435036,872439336, + 
872420273,872436907,872420147,872439731,872436657,872420397,872439732,872439339, + 872439603,872422436,872439861,872437051,872437052,872420649,872421950,872437185, + 872422564,872421566,872421695,872421955,872421567,872437184,872421951,872421694, + 872437056,872437057,872437313,872438182,872438951,872439990,872439864,872439609, + 872437181,872437182,872437053,872436794,872436667,872436536,872421541,872421413, + 872421414,872421031,872423083,872421439]) + process.ecalBadCalibReducedMINIAODFilter = cms.EDFilter("EcalBadCalibFilter", + EcalRecHitSource = cms.InputTag("reducedEgamma:reducedEERecHits"), + ecalMinEt = cms.double(50.), + baddetEcal = baddetEcallist, + taggingMode = cms.bool(True), + debug = cms.bool(False) + ) + process.metFilterSequence += process.ecalBadCalibReducedMINIAODFilter + customMetFilters.ecalBadCalibReducedMINIAODFilter = cms.InputTag("ecalBadCalibReducedMINIAODFilter") + +if len(options.metFiltersProcess) == 0: + metFiltersProcess = 'PAT' + if year in [ 2016, 2018 ] and not options.isMC: + metFiltersProcess = 'RECO' +else: + metFiltersProcess = options.metFiltersProcess + +# Re-apply MET corrections +process.metSequence = cms.Sequence() +if not options.pureGenMode and options.period in [ 'Run2016', 'Run2017' ]: + met_run_params = { } + if options.period == 'Run2017': + met_run_params = { + 'fixEE2017': True, + 'fixEE2017Params': { + 'userawPt': True, + 'ptThreshold':50.0, + 'minEtaThreshold':2.65, + 'maxEtaThreshold': 3.139 + } + } + from PhysicsTools.PatUtils.tools.runMETCorrectionsAndUncertainties import runMetCorAndUncFromMiniAOD + runMetCorAndUncFromMiniAOD(process, isData = not options.isMC, postfix="Updated", **met_run_params) + metInputTag = cms.InputTag('slimmedMETsUpdated', '', processName) + process.metSequence += process.fullPatMetSequenceUpdated +else: + metInputTag = cms.InputTag('slimmedMETs') + +from TauTriggerTools.Common import TriggerConfig +trigFile = '{}/src/TauTriggerTools/TauTagAndProbe/data/{}/triggers.json'.format(os.environ['CMSSW_BASE'], year) +hltPaths, tagHltPaths = TriggerConfig.LoadAsVPSet(trigFile) + +import HLTrigger.HLTfilters.hltHighLevel_cfi as hlt + +process.hltFilter = hlt.hltHighLevel.clone( + TriggerResultsTag = cms.InputTag("TriggerResults", "", "HLT"), + HLTPaths = [p + '*' for p in tagHltPaths], + andOr = cms.bool(True), # True (OR) accept if ANY is true, False (AND) accept if ALL are true + throw = cms.bool(True) # if True: throws exception if a trigger path is invalid +) + +process.patTriggerUnpacker = cms.EDProducer("PATTriggerObjectStandAloneUnpacker", + patTriggerObjectsStandAlone = cms.InputTag("slimmedPatTrigger"), + triggerResults = cms.InputTag('TriggerResults', '', options.triggerProcess), + unpackFilterLabels = cms.bool(True) +) + +process.selectionFilter = cms.EDFilter("TauTriggerSelectionFilter", + enabled = cms.bool(not options.pureGenMode), + electrons = cms.InputTag('slimmedElectrons'), + muons = cms.InputTag('slimmedMuons'), + jets = cms.InputTag('slimmedJets'), + met = metInputTag, + metFiltersResults = cms.InputTag('TriggerResults', '', metFiltersProcess), + customMetFilters = customMetFilters, + btagThreshold = cms.double(-1), + metFilters = cms.vstring(getMetFilters(options.period, options.isMC)), + mtCut = cms.double(-1) +) + +process.summaryProducer = cms.EDProducer("TauTriggerSummaryTupleProducer", + isMC = cms.bool(options.isMC), + genEvent = cms.InputTag('generator'), + puInfo = cms.InputTag('slimmedAddPileupInfo'), + vertices = cms.InputTag('offlineSlimmedPrimaryVertices'), + hltPaths = hltPaths +) 
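For orientation only (this is not part of the diff): the producers defined just above and below are steered by the VarParsing options registered at the top of the new produceTuples.py, so a local test run of this config might be invoked roughly as follows, with the file-list name being a placeholder:

    cmsRun produceTuples.py inputFileList=myFiles.txt fileNamePrefix=file: period=Run2018 isMC=True runDeepTau=True outputTupleFile=eventTuple.root maxEvents=1000
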
+ +process.tupleProducer = cms.EDProducer("TauTriggerTupleProducer", + isMC = cms.bool(options.isMC), + genEvent = cms.InputTag('generator'), + puInfo = cms.InputTag('slimmedAddPileupInfo'), + genParticles = cms.InputTag('prunedGenParticles'), + vertices = cms.InputTag('offlineSlimmedPrimaryVertices'), + signalMuon = cms.InputTag('selectionFilter'), + taus = tauSrc_InputTag, + jets = cms.InputTag('slimmedJets'), + met = metInputTag, + btagThreshold = cms.double(getBtagThreshold(options.period, 'Loose')), + hltPaths = hltPaths, + triggerProcess = cms.string(options.triggerProcess), + triggerObjects = cms.InputTag('patTriggerUnpacker'), + l1Taus = cms.InputTag("caloStage2Digis", "Tau", "RECO") +) + +process.p = cms.Path( + process.summaryProducer + + process.hltFilter + + process.egammaPostRecoSeq + + process.metSequence + + process.metFilterSequence + + process.selectionFilter + + process.rerunMvaIsolationSequence + + getattr(process, updatedTauName) + + process.patTriggerUnpacker + + process.tupleProducer +) + +# Verbosity customization +process.load("FWCore.MessageService.MessageLogger_cfi") +process.MessageLogger.cerr.FwkReport.reportEvery = getReportInterval(process.maxEvents.input.value()) +process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(options.wantSummary) ) diff --git a/TauTagAndProbe/test/reEmulL1.py b/TauTagAndProbe/test/reEmulL1.py deleted file mode 100644 index f414f8c4b70..00000000000 --- a/TauTagAndProbe/test/reEmulL1.py +++ /dev/null @@ -1,158 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -from Configuration.StandardSequences.Eras import eras - -isMC = True - -process = cms.Process("TagAndProbe",eras.Run2_2016) -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -process.load('Configuration.StandardSequences.RawToDigi_Data_cff') -process.load('Configuration.StandardSequences.EndOfProcess_cff') -process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') -process.load('Configuration.StandardSequences.Services_cff') -process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -process.load('FWCore.MessageService.MessageLogger_cfi') -process.load('Configuration.EventContent.EventContent_cff') -process.load('Configuration.Geometry.GeometryExtended2016Reco_cff') -process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff') - - -options = VarParsing.VarParsing ('analysis') -options.register ('secondaryFilesList','',VarParsing.VarParsing.multiplicity.singleton,VarParsing.VarParsing.varType.string, "List of secondary input files") - -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple.root' -options.inputFiles = [] -options.maxEvents = -999 - -options.parseArguments() - -import FWCore.Utilities.FileUtils as FileUtils -listSecondaryFiles = FileUtils.loadListFromFile (options.secondaryFilesList) - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8' - 
process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - ), - - secondaryFileNames = cms.untracked.vstring(listSecondaryFiles) - #secondaryFileNames = cms.untracked.vstring('file:AA918EB1-6E64-E611-9BE0-00259074AE54.root') - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/302E52FC-8567-E611-B2AA-0CC47A703326.root', - #), -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/6E300626-5E26-E611-980B-02163E0119A2.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/08F68A47-5D26-E611-B042-02163E012239.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/1011D344-5E26-E611-ABC4-02163E011CF0.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/1E3EFD43-5E26-E611-AE17-02163E0146FF.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/2EA6473B-5E26-E611-B82A-02163E011EAC.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/3CD7BB24-5D26-E611-88A3-02163E014736.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/3CE74444-5E26-E611-8CE6-02163E012545.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/488D1A44-5E26-E611-8057-02163E014285.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/5EECA846-5D26-E611-A99C-02163E01432B.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/7C7FB848-5E26-E611-8A06-02163E014167.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/86E8DC25-5D26-E611-9827-02163E014713.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/A4B22F44-5E26-E611-AFDA-02163E0141F3.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/BA326B3B-5E26-E611-A6E0-02163E0124FA.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/BADC0417-5D26-E611-872F-02163E012A7E.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/C813CA24-5E26-E611-B7A2-02163E011F93.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/E251C323-5E26-E611-825D-02163E011A0F.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/E8FD1844-5E26-E611-B99E-02163E0146CB.root', -# '/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/F8B44816-5E26-E611-A87A-02163E011E74.root', -# ) - ) - - #process.source.eventsToProcess = cms.untracked.VEventRange('281613:108:12854629') - #process.source.eventsToProcess = cms.untracked.VEventRange('274199:353:670607108') - -else: - #process.GlobalTag.globaltag = 'auto:run2_mc' #MC 25 ns miniAODv2 - process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_v14' - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - #'/store/mc/RunIISpring16DR80/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM-RAW/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/40000/AA918EB1-6E64-E611-9BE0-00259074AE54.root' - '/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/1A13CB76-9B67-E611-A143-0050560210EC.root' - ), - 
secondaryFileNames = cms.untracked.vstring(listSecondaryFiles) - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/00000/08D2C535-5458-E511-B0C0-FA163E83549A.root' - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/10000/8A2D3925-4658-E511-80B2-02163E014126.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/10000/ECB7FC03-8058-E511-BE9D-02163E0141A2.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/60000/4473DFF5-9456-E511-9C4B-002590494C8A.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/60000/587A9A92-A956-E511-AC52-0025904B11CC.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/60000/7076052C-4A57-E511-B371-00259074AE9A.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/60000/7CD6A6C7-9C56-E511-87E5-003048C75840.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/60000/7E281C8E-4357-E511-8498-00259074AE80.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/60000/98884797-4957-E511-82C6-00259073E504.root', - #'/store/mc/RunIISummer15GS/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM/MCRUN2_71_V1-v1/60000/BE274E99-4957-E511-88ED-0025907A1A2E.root', - - ) - -process.schedule = cms.Schedule() - -## L1 emulation stuff - -if not isMC: - from L1Trigger.Configuration.customiseReEmul import L1TReEmulFromRAW - process = L1TReEmulFromRAW(process) -else: - from L1Trigger.Configuration.customiseReEmul import L1TReEmulMCFromRAW - process = L1TReEmulMCFromRAW(process) - from L1Trigger.Configuration.customiseUtils import L1TTurnOffUnpackStage2GtGmtAndCalo - process = L1TTurnOffUnpackStage2GtGmtAndCalo(process) - -process.load("L1Trigger.L1TCalorimeter.caloStage2Params_2016_v3_2_cfi") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. 
- - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path ( - process.TAndPseq + - process.RawToDigi + - process.L1TReEmul + - process.NtupleSeq -) -process.schedule = cms.Schedule(process.p) # do my sequence pls - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/reEmulL1_MC_L1Only.py b/TauTagAndProbe/test/reEmulL1_MC_L1Only.py deleted file mode 100644 index a8b083eee1f..00000000000 --- a/TauTagAndProbe/test/reEmulL1_MC_L1Only.py +++ /dev/null @@ -1,171 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -from Configuration.StandardSequences.Eras import eras - -isMC = True - -process = cms.Process("ZeroBias",eras.Run2_2017) -#process = cms.Process("ZeroBias",eras.Run2_2016) - -process.load('Configuration.StandardSequences.Services_cff') -process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -process.load('FWCore.MessageService.MessageLogger_cfi') -process.load('Configuration.EventContent.EventContent_cff') -process.load('SimGeneral.MixingModule.mixNoPU_cfi') -process.load('Configuration.StandardSequences.GeometryRecoDB_cff') -process.load('Configuration.StandardSequences.MagneticField_cff') -process.load('Configuration.StandardSequences.RawToDigi_cff') -process.load('Configuration.StandardSequences.EndOfProcess_cff') -process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') - -#process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -#process.load('Configuration.StandardSequences.RawToDigi_Data_cff') -#process.load('Configuration.StandardSequences.EndOfProcess_cff') -#process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') -#process.load('Configuration.StandardSequences.Services_cff') -#process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -#process.load('FWCore.MessageService.MessageLogger_cfi') -#process.load('Configuration.EventContent.EventContent_cff') -#process.load('Configuration.Geometry.GeometryRecoDB_cff') -##process.load('Configuration.Geometry.GeometryExtended2016Reco_cff') -#process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff') - -#process.load('Configuration.StandardSequences.Services_cff') -#process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -#process.load('FWCore.MessageService.MessageLogger_cfi') -#process.load('Configuration.EventContent.EventContent_cff') -#process.load('SimGeneral.MixingModule.mixNoPU_cfi') -#process.load('Configuration.Geometry.GeometryExtended2016Reco_cff') -#process.load('Configuration.StandardSequences.MagneticField_cff') -#process.load('Configuration.StandardSequences.RawToDigi_cff') 
-#process.load('Configuration.StandardSequences.EndOfProcess_cff') -#process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') - - - -options = VarParsing.VarParsing ('analysis') - -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple_ZeroBias.root' -options.inputFiles = [] -options.maxEvents = -999 - -options.parseArguments() - -import FWCore.Utilities.FileUtils as FileUtils - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '90X_mcRun2_asymptotic_v0' - process.load('TauTagAndProbe.TauTagAndProbe.zeroBias_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - #'file:BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - '/store/data/Run2016E/ZeroBias/RAW/v2/000/276/831/00000/04145A1E-A54B-E611-A0C6-FA163E6A5A26.root' - #'/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - ), - ) - -else: # will use 80X - from Configuration.AlCa.autoCond import autoCond - #process.GlobalTag.globaltag = 'auto:run2_mc' - process.GlobalTag.globaltag = '92X_upgrade2017_TSG_For90XSamples_V2' - #process.GlobalTag.globaltag = '90X_mcRun2_asymptotic_v0' - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_TrancheIV_v6' - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_v14' - #process.GlobalTag.globaltag = 'auto:run2_mc' - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_v6' #MC 25 ns miniAODv2 - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_v3' #MC 25 ns miniAODv2 - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - process.load('TauTagAndProbe.TauTagAndProbe.zeroBias_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/PhaseISpring17DR/VBFHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM-RAW/FlatPU28to62HcalNZSRAW_HIG07_90X_upgrade2017_realistic_v20-v1/100000/004D4A52-A62C-E711-B518-848F69FD2853.root', - #'file:BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - #'file:0240C947-0CA4-E611-A94C-ECF4BBE1CEB0.root' - #'file:0A3E7062-D365-E611-BCF4-001EC9AF0377.root' - #'/store/mc/RunIISpring16DR80/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM-RAW/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/0A3E7062-D365-E611-BCF4-001EC9AF0377.root' - #'/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - ), - ) - -process.schedule = cms.Schedule() - -## L1 emulation stuff - -if not isMC: - from L1Trigger.Configuration.customiseReEmul import L1TReEmulFromRAW - process = L1TReEmulFromRAW(process) -else: - #from L1Trigger.Configuration.customiseReEmul import L1TReEmulMCFromRAW - #process = L1TReEmulMCFromRAW(process) - - from L1Trigger.Configuration.customiseReEmul import L1TReEmulMCFrom90xRAWSimHcalTP - process = L1TReEmulMCFrom90xRAWSimHcalTP(process) - - #4 
lines below were here before - #from L1Trigger.Configuration.customiseReEmul import L1TReEmulFromRAWsimTP - #process = L1TReEmulFromRAWsimTP(process) - #from L1Trigger.Configuration.customiseUtils import L1TTurnOffUnpackStage2GtGmtAndCalo - #process = L1TTurnOffUnpackStage2GtGmtAndCalo(process) - - -process.load("L1Trigger.L1TCalorimeter.caloStage2Params_2017_v1_9_inconsistent_mean_cfi") -#process.load("L1Trigger.L1TCalorimeter.caloStage2Params_2017_v1_4_cfi") -#process.load("L1Trigger.L1TCalorimeter.caloStage2Params_2017_v1_0_inconsistent_cfi") -#process.load("L1Trigger.L1TCalorimeter.caloStage2Params_2016_v3_2_cfi") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.load('EventFilter.L1TRawToDigi.caloStage2Digis_cfi') -process.caloStage2Digis.InputLabel = cms.InputTag('rawDataCollector') - -process.p = cms.Path ( - process.RawToDigi + - process.caloStage2Digis + - process.L1TReEmul + - process.NtupleZeroBiasSeq -) -process.schedule = cms.Schedule(process.p) # do my sequence pls - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/reEmulL1_ZeroBias.py b/TauTagAndProbe/test/reEmulL1_ZeroBias.py deleted file mode 100644 index ca13d7b6297..00000000000 --- a/TauTagAndProbe/test/reEmulL1_ZeroBias.py +++ /dev/null @@ -1,162 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -from Configuration.StandardSequences.Eras import eras - -isMC = False -#isMC = False - -#process = cms.Process("ZeroBias",eras.Run2_2016) -process = cms.Process("ZeroBias",eras.Run2_2017) - -process.load('Configuration.StandardSequences.Services_cff') -process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -process.load('FWCore.MessageService.MessageLogger_cfi') -process.load('Configuration.EventContent.EventContent_cff') -process.load('Configuration.StandardSequences.GeometryRecoDB_cff') -process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff') -process.load('Configuration.StandardSequences.RawToDigi_Data_cff') -process.load('Configuration.StandardSequences.EndOfProcess_cff') -process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') - -#all above present before -#process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -#process.load('Configuration.StandardSequences.RawToDigi_Data_cff') -#process.load('Configuration.StandardSequences.EndOfProcess_cff') -#process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') 
-#process.load('Configuration.StandardSequences.Services_cff') -#process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -#process.load('FWCore.MessageService.MessageLogger_cfi') -#process.load('Configuration.EventContent.EventContent_cff') -#process.load('Configuration.Geometry.GeometryExtended2016Reco_cff') -#process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff') - -#process.load('Configuration.StandardSequences.Services_cff') -#process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi') -#process.load('FWCore.MessageService.MessageLogger_cfi') -#process.load('Configuration.EventContent.EventContent_cff') -#process.load('SimGeneral.MixingModule.mixNoPU_cfi') -#process.load('Configuration.Geometry.GeometryExtended2016Reco_cff') -#process.load('Configuration.StandardSequences.MagneticField_cff') -#process.load('Configuration.StandardSequences.RawToDigi_cff') -#process.load('Configuration.StandardSequences.EndOfProcess_cff') -#process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') - - - -options = VarParsing.VarParsing ('analysis') - -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple_ZeroBias.root' -options.inputFiles = [] -options.maxEvents = -999 - -options.parseArguments() - -import FWCore.Utilities.FileUtils as FileUtils - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '92X_dataRun2_HLT_v7' - #process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8' - process.load('TauTagAndProbe.TauTagAndProbe.zeroBias_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - #'file:BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - '/store/data/Run2017C/ZeroBias/RAW/v1/000/300/806/00000/02C685A4-567D-E711-9C28-02163E0127B8.root', - #'/store/data/Run2016E/ZeroBias/RAW/v2/000/277/420/00000/02E63363-A052-E611-8FD7-FA163E249562.root' - #'/store/data/Run2016E/ZeroBias/RAW/v2/000/276/831/00000/04145A1E-A54B-E611-A0C6-FA163E6A5A26.root' - #'/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - ), - ) - -else: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '92X_upgrade2017_TSG_For90XSamples_V2'#for VBF Hinv - #process.GlobalTag.globaltag = '90X_upgrade2017_TSG_Hcal_V3'#for VBF Hinv - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_v14'#for VBF HTT - #process.GlobalTag.globaltag = 'auto:run2_mc' - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_v6' #MC 25 ns miniAODv2 - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_v3' #MC 25 ns miniAODv2 - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - process.load('TauTagAndProbe.TauTagAndProbe.zeroBias_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - #'file:BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - 'file:0A3E7062-D365-E611-BCF4-001EC9AF0377.root' - 
#'/store/mc/RunIISpring16DR80/GluGluHToTauTau_M125_13TeV_powheg_pythia8/GEN-SIM-RAW/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/0A3E7062-D365-E611-BCF4-001EC9AF0377.root' - #'/store/data/Run2016B/SingleMuon/RAW/v2/000/274/199/00000/BCB1EC0B-5E26-E611-8240-02163E0145B8.root' - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - ), - ) - -process.schedule = cms.Schedule() - -## L1 emulation stuff - -if not isMC: - from L1Trigger.Configuration.customiseReEmul import L1TReEmulFromRAW - process = L1TReEmulFromRAW(process) -else: - from L1Trigger.Configuration.customiseReEmul import L1TReEmulMCFromRAW - process = L1TReEmulMCFromRAW(process) - from L1Trigger.Configuration.customiseUtils import L1TTurnOffUnpackStage2GtGmtAndCalo - process = L1TTurnOffUnpackStage2GtGmtAndCalo(process) - #from L1Trigger.Configuration.customiseReEmul import L1TReEmulFromRAWsimTP - #process = L1TReEmulFromRAWsimTP(process) - -process.load("L1Trigger.L1TCalorimeter.caloStage2Params_2017_v1_4_cfi") -#process.load("L1Trigger.L1TCalorimeter.caloStage2Params_2016_v3_2_cfi") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.load('EventFilter.L1TRawToDigi.caloStage2Digis_cfi') -process.caloStage2Digis.InputLabel = cms.InputTag('rawDataCollector') - -process.p = cms.Path ( - process.RawToDigi + - process.caloStage2Digis + - process.L1TReEmul + - process.NtupleZeroBiasSeq -) -process.schedule = cms.Schedule(process.p) # do my sequence pls - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/submitOnTier3.py b/TauTagAndProbe/test/submitOnTier3.py deleted file mode 100644 index a5e6773aedf..00000000000 --- a/TauTagAndProbe/test/submitOnTier3.py +++ /dev/null @@ -1,107 +0,0 @@ -import os - -isMC = True -#isMC = False - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i+n] - -def splitInBlocks (l, n): - """split the list l in n blocks of equal size""" - k = len(l) / n - r = len(l) % n - - i = 0 - blocks = [] - while i < len(l): - if len(blocks) len(files) : njobs = len(files) -filelist.close() - -fileblocks = splitInBlocks (files, njobs) - -for idx, block in enumerate(fileblocks): - outRootName = folder + '/Ntuple_' + str(idx) + '.root' - outJobName = folder + '/job_' + str(idx) + '.sh' - outListName = folder + "/filelist_" + str(idx) + ".txt" - outLogName = os.getcwd() + "/" + folder + "/log_" + str(idx) + ".txt" - - jobfilelist = open(outListName, 'w') - for f in block: jobfilelist.write(f+"\n") - 
jobfilelist.close() - - if not isMC: - cmsRun = "cmsRun test.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " JSONfile="+JSONfile + " >& " + outLogName - else: - cmsRun = "cmsRun test_noTagAndProbe.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " >& " + outLogName - - skimjob = open (outJobName, 'w') - skimjob.write ('#!/bin/bash\n') - skimjob.write ('export X509_USER_PROXY=~/.t3/proxy.cert\n') - skimjob.write ('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - skimjob.write ('cd %s\n' % os.getcwd()) - skimjob.write ('export SCRAM_ARCH=slc6_amd64_gcc472\n') - skimjob.write ('eval `scram r -sh`\n') - skimjob.write ('cd %s\n'%os.getcwd()) - skimjob.write (cmsRun+'\n') - skimjob.close () - - os.system ('chmod u+rwx ' + outJobName) - command = ('/opt/exp_soft/cms/t3/t3submit_new -long \'' + outJobName +"\'") - # print command - os.system (command) diff --git a/TauTagAndProbe/test/submitOnTier3_AOD.py b/TauTagAndProbe/test/submitOnTier3_AOD.py deleted file mode 100644 index 38fdcfa5a24..00000000000 --- a/TauTagAndProbe/test/submitOnTier3_AOD.py +++ /dev/null @@ -1,109 +0,0 @@ -import os - -isMC = True -#isMC = False - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i+n] - -def splitInBlocks (l, n): - """split the list l in n blocks of equal size""" - k = len(l) / n - r = len(l) % n - - i = 0 - blocks = [] - while i < len(l): - if len(blocks) len(files) : njobs = len(files) -filelist.close() - -fileblocks = splitInBlocks (files, njobs) - -for idx, block in enumerate(fileblocks): - outRootName = folder + '/Ntuple_' + str(idx) + '.root' - outJobName = folder + '/job_' + str(idx) + '.sh' - outListName = folder + "/filelist_" + str(idx) + ".txt" - outLogName = os.getcwd() + "/" + folder + "/log_" + str(idx) + ".txt" - - jobfilelist = open(outListName, 'w') - for f in block: jobfilelist.write(f+"\n") - jobfilelist.close() - - if not isMC: - cmsRun = "cmsRun test.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " JSONfile="+JSONfile + " >& " + outLogName - else: - cmsRun = "cmsRun test_noTagAndProbe_AOD.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " >& " + outLogName - - skimjob = open (outJobName, 'w') - skimjob.write ('#!/bin/bash\n') - skimjob.write ('export X509_USER_PROXY=~/.t3/proxy.cert\n') - skimjob.write ('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - skimjob.write ('cd %s\n' % os.getcwd()) - skimjob.write ('export SCRAM_ARCH=slc6_amd64_gcc472\n') - skimjob.write ('eval `scram r -sh`\n') - skimjob.write ('cd %s\n'%os.getcwd()) - skimjob.write (cmsRun+'\n') - skimjob.close () - - os.system ('chmod u+rwx ' + outJobName) - command = ('/opt/exp_soft/cms/t3/t3submit_new -long \'' + outJobName +"\'") - # print command - os.system (command) diff --git a/TauTagAndProbe/test/submitOnTier3_multipleTaus.py b/TauTagAndProbe/test/submitOnTier3_multipleTaus.py deleted file mode 100644 index de62e9a9ee2..00000000000 --- a/TauTagAndProbe/test/submitOnTier3_multipleTaus.py +++ /dev/null @@ -1,107 +0,0 @@ -import os - -isMC = True -#isMC = False - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i+n] - -def splitInBlocks (l, n): - """split the list l in n blocks of equal size""" - k = len(l) / n - r = len(l) % n - - i = 0 - blocks = [] - while i < len(l): - if len(blocks) len(files) : njobs = len(files) -filelist.close() - -fileblocks = splitInBlocks (files, njobs) 
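In each of these deleted submission scripts the body of splitInBlocks is truncated in the diff as shown above (the text between the comparison operators, along with the file-list and job-count setup that followed it, has been lost), so only the docstring and the call fileblocks = splitInBlocks(files, njobs) survive. Purely as an illustrative sketch of such an even-split helper, and not as the original code:

    def splitInBlocks(l, n):
        """Split the list l into n blocks of (nearly) equal size."""
        k = len(l) // n   # base block size
        r = len(l) % n    # the first r blocks receive one extra element
        blocks, i = [], 0
        while i < len(l):
            if len(blocks) < r:
                blocks.append(l[i:i + k + 1])
                i += k + 1
            else:
                blocks.append(l[i:i + k])
                i += k
        return blocks
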
- -for idx, block in enumerate(fileblocks): - outRootName = folder + '/Ntuple_' + str(idx) + '.root' - outJobName = folder + '/job_' + str(idx) + '.sh' - outListName = folder + "/filelist_" + str(idx) + ".txt" - outLogName = os.getcwd() + "/" + folder + "/log_" + str(idx) + ".txt" - - jobfilelist = open(outListName, 'w') - for f in block: jobfilelist.write(f+"\n") - jobfilelist.close() - - if not isMC: - cmsRun = "cmsRun test.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " JSONfile="+JSONfile + " >& " + outLogName - else: - cmsRun = "cmsRun test_noTagAndProbe_multipleTaus.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " >& " + outLogName - - skimjob = open (outJobName, 'w') - skimjob.write ('#!/bin/bash\n') - skimjob.write ('export X509_USER_PROXY=~/.t3/proxy.cert\n') - skimjob.write ('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - skimjob.write ('cd %s\n' % os.getcwd()) - skimjob.write ('export SCRAM_ARCH=slc6_amd64_gcc472\n') - skimjob.write ('eval `scram r -sh`\n') - skimjob.write ('cd %s\n'%os.getcwd()) - skimjob.write (cmsRun+'\n') - skimjob.close () - - os.system ('chmod u+rwx ' + outJobName) - command = ('/opt/exp_soft/cms/t3/t3submit_new -long \'' + outJobName +"\'") - # print command - os.system (command) diff --git a/TauTagAndProbe/test/submitOnTier3_multipleTaus_AOD.py b/TauTagAndProbe/test/submitOnTier3_multipleTaus_AOD.py deleted file mode 100644 index cf0bed1173d..00000000000 --- a/TauTagAndProbe/test/submitOnTier3_multipleTaus_AOD.py +++ /dev/null @@ -1,109 +0,0 @@ -import os - -isMC = True -#isMC = False - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i+n] - -def splitInBlocks (l, n): - """split the list l in n blocks of equal size""" - k = len(l) / n - r = len(l) % n - - i = 0 - blocks = [] - while i < len(l): - if len(blocks) len(files) : njobs = len(files) -filelist.close() - -fileblocks = splitInBlocks (files, njobs) - -for idx, block in enumerate(fileblocks): - outRootName = folder + '/Ntuple_' + str(idx) + '.root' - outJobName = folder + '/job_' + str(idx) + '.sh' - outListName = folder + "/filelist_" + str(idx) + ".txt" - outLogName = os.getcwd() + "/" + folder + "/log_" + str(idx) + ".txt" - - jobfilelist = open(outListName, 'w') - for f in block: jobfilelist.write(f+"\n") - jobfilelist.close() - - if not isMC: - cmsRun = "cmsRun test.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " JSONfile="+JSONfile + " >& " + outLogName - else: - cmsRun = "cmsRun test_noTagAndProbe_multipleTaus_AOD.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " >& " + outLogName - - skimjob = open (outJobName, 'w') - skimjob.write ('#!/bin/bash\n') - skimjob.write ('export X509_USER_PROXY=~/.t3/proxy.cert\n') - skimjob.write ('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - skimjob.write ('cd %s\n' % os.getcwd()) - skimjob.write ('export SCRAM_ARCH=slc6_amd64_gcc472\n') - skimjob.write ('eval `scram r -sh`\n') - skimjob.write ('cd %s\n'%os.getcwd()) - skimjob.write (cmsRun+'\n') - skimjob.close () - - os.system ('chmod u+rwx ' + outJobName) - command = ('/opt/exp_soft/cms/t3/t3submit_new -long \'' + outJobName +"\'") - # print command - os.system (command) diff --git a/TauTagAndProbe/test/submitOnTier3_reEmulL1.py b/TauTagAndProbe/test/submitOnTier3_reEmulL1.py deleted file mode 100644 index b6ce6070991..00000000000 --- a/TauTagAndProbe/test/submitOnTier3_reEmulL1.py +++ /dev/null @@ -1,102 +0,0 @@ -import 
os -import json -from subprocess import Popen, PIPE - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i+n] - -def splitInBlocks (l, n): - """split the list l in n blocks of equal size""" - k = len(l) / n - r = len(l) % n - - i = 0 - blocks = [] - while i < len(l): - if len(blocks) len(files) : njobs = len(files) -filelist.close() - -fileblocks = splitInBlocks (files, njobs) - -from das_client import get_data - -for idx, block in enumerate(fileblocks): - #print idx, block - secondaryFileName = folder+"/secondaryFileList_split_"+str(idx)+".txt" - f = open(secondaryFileName, 'w') - myList = list() - - for currentFile in block: - #print "finding parent of: ",currentFile - command_das = "das_client --query=\"parent file="+currentFile+"\" --limit=0" - #print " > command is: " , command_das - - pipe = Popen(command_das, shell=True, stdout=PIPE) - for line in pipe.stdout: - line = line.rstrip("\n") - myList.append(line) - #print >> f, line - - unique = [] - [unique.append(item) for item in myList if item not in unique] - #print unique - - for line in unique: - print >> f, line - - outRootName = folder + '/Ntuple_' + str(idx) + '.root' - outJobName = folder + '/job_' + str(idx) + '.sh' - outListName = folder + "/filelist_" + str(idx) + ".txt" - outLogName = os.getcwd() + "/" + folder + "/log_" + str(idx) + ".txt" - - jobfilelist = open(outListName, 'w') - for f in block: jobfilelist.write(f+"\n") - jobfilelist.close() - - cmsRun = "cmsRun reEmulL1.py maxEvents=-1 inputFiles_load="+outListName + " secondaryFilesList="+secondaryFileName+" " + " outputFile="+outRootName + " JSONfile="+JSONfile + " >& " + outLogName - skimjob = open (outJobName, 'w') - skimjob.write ('#!/bin/bash\n') - skimjob.write ('export X509_USER_PROXY=~/.t3/proxy.cert\n') - skimjob.write ('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - skimjob.write ('cd %s\n' % os.getcwd()) - skimjob.write ('export SCRAM_ARCH=slc6_amd64_gcc472\n') - skimjob.write ('eval `scram r -sh`\n') - skimjob.write ('cd %s\n'%os.getcwd()) - skimjob.write (cmsRun+'\n') - skimjob.close () - - os.system ('chmod u+rwx ' + outJobName) - command = ('/opt/exp_soft/cms/t3/t3submit_new -long \'' + outJobName +"\'") -# command = ('/opt/exp_soft/cms/t3/t3submit_new -short -q cms \'' + outJobName +"\'") - print command - os.system (command) diff --git a/TauTagAndProbe/test/submitOnTier3_reEmulL1_MC.py b/TauTagAndProbe/test/submitOnTier3_reEmulL1_MC.py deleted file mode 100644 index e24ba030f2a..00000000000 --- a/TauTagAndProbe/test/submitOnTier3_reEmulL1_MC.py +++ /dev/null @@ -1,123 +0,0 @@ -import os -import json -from subprocess import Popen, PIPE - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i+n] - -def splitInBlocks (l, n): - """split the list l in n blocks of equal size""" - k = len(l) / n - r = len(l) % n - - i = 0 - blocks = [] - while i < len(l): - if len(blocks) len(files) : njobs = len(files) -filelist.close() - -fileblocks = splitInBlocks (files, njobs) - -from das_client import get_data - -for idx, block in enumerate(fileblocks): - print idx, block - secondaryFileName = folder+"/secondaryFileList_split_"+str(idx)+".txt" - f = open(secondaryFileName, 'w') - myList = list() - myList2 = list() - - for currentFile in block: - #print "finding parent of: ",currentFile - #command_das = "das_client --query=\" child file="+currentFile+"\" --limit=0" - command_das = "das_client --query=\" parent file="+currentFile+"\" --limit=0" - 
print " > command is: " , command_das - - pipe = Popen(command_das, shell=True, stdout=PIPE) - for line in pipe.stdout: - line = line.rstrip("\n") - myList.append(line) - #print >> f, line - - command_das2 = "das_client --query=\" parent file="+line+"\" --limit=0" - #command_das2 = "das_client --query=\" child file="+line+"\" --limit=0" - print command_das2 - #To be put back if parent of parent is needed - pipe2 = Popen(command_das2, shell=True, stdout=PIPE) - - #line2 = line.rstrip("\n") - #myList2.append(line2) - - #To be put back if parent of parent is needed - for line2 in pipe2.stdout: - line2 = line2.rstrip("\n") - myList2.append(line2) - - unique = [] - [unique.append(item) for item in myList2 if item not in unique] - print unique - - for line in unique: - print >> f, line - - outRootName = folder + '/Ntuple_' + str(idx) + '.root' - outJobName = folder + '/job_' + str(idx) + '.sh' - outListName = folder + "/filelist_" + str(idx) + ".txt" - outLogName = os.getcwd() + "/" + folder + "/log_" + str(idx) + ".txt" - - jobfilelist = open(outListName, 'w') - for f in block: jobfilelist.write(f+"\n") - jobfilelist.close() - - cmsRun = "cmsRun reEmulL1.py maxEvents=-1 inputFiles_load="+outListName + " secondaryFilesList="+secondaryFileName+" " + " outputFile="+outRootName + " >& " + outLogName -# cmsRun = "cmsRun reEmulL1.py maxEvents=-1 inputFiles_load="+outListName + " secondaryFilesList="+secondaryFileName+" " + " outputFile="+outRootName + " JSONfile="+JSONfile + " >& " + outLogName - skimjob = open (outJobName, 'w') - skimjob.write ('#!/bin/bash\n') - skimjob.write ('export X509_USER_PROXY=~/.t3/proxy.cert\n') - skimjob.write ('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - skimjob.write ('cd %s\n' % os.getcwd()) - skimjob.write ('export SCRAM_ARCH=slc6_amd64_gcc472\n') - skimjob.write ('eval `scram r -sh`\n') - skimjob.write ('cd %s\n'%os.getcwd()) - skimjob.write (cmsRun+'\n') - skimjob.close () - - os.system ('chmod u+rwx ' + outJobName) - command = ('/opt/exp_soft/cms/t3/t3submit_new -long \'' + outJobName +"\'") -# command = ('/opt/exp_soft/cms/t3/t3submit_new -short -q cms \'' + outJobName +"\'") - print command - #os.system (command) diff --git a/TauTagAndProbe/test/submitOnTier3_reEmulL1_zeroBias.py b/TauTagAndProbe/test/submitOnTier3_reEmulL1_zeroBias.py deleted file mode 100644 index 296ad6efc0c..00000000000 --- a/TauTagAndProbe/test/submitOnTier3_reEmulL1_zeroBias.py +++ /dev/null @@ -1,100 +0,0 @@ -import os -import json -from subprocess import Popen, PIPE - -isMC = True -#isMC = False - -def chunks(l, n): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i+n] - -def splitInBlocks (l, n): - """split the list l in n blocks of equal size""" - k = len(l) / n - r = len(l) % n - - i = 0 - blocks = [] - while i < len(l): - if len(blocks) len(files) : njobs = len(files) -filelist.close() - -fileblocks = splitInBlocks (files, njobs) - -from das_client import get_data - -for idx, block in enumerate(fileblocks): - #print idx, block - - outRootName = folder + '/Ntuple_' + str(idx) + '.root' - outJobName = folder + '/job_' + str(idx) + '.sh' - outListName = folder + "/filelist_" + str(idx) + ".txt" - outLogName = os.getcwd() + "/" + folder + "/log_" + str(idx) + ".txt" - - jobfilelist = open(outListName, 'w') - for f in block: jobfilelist.write(f+"\n") - jobfilelist.close() - - if not isMC: - cmsRun = "cmsRun reEmulL1_ZeroBias.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " >& " + outLogName - #cmsRun = "cmsRun 
reEmulL1_ZeroBias.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " JSONfile="+JSONfile + " >& " + outLogName - else: - cmsRun = "cmsRun reEmulL1_MC_L1Only.py maxEvents=-1 inputFiles_load="+outListName + " outputFile="+outRootName + " >& " + outLogName - - skimjob = open (outJobName, 'w') - skimjob.write ('#!/bin/bash\n') - skimjob.write ('export X509_USER_PROXY=~/.t3/proxy.cert\n') - skimjob.write ('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - skimjob.write ('cd %s\n' % os.getcwd()) - skimjob.write ('export SCRAM_ARCH=slc6_amd64_gcc472\n') - skimjob.write ('eval `scram r -sh`\n') - skimjob.write ('cd %s\n'%os.getcwd()) - skimjob.write (cmsRun+'\n') - skimjob.close () - - os.system ('chmod u+rwx ' + outJobName) - command = ('/opt/exp_soft/cms/t3/t3submit_new -long \'' + outJobName +"\'") -# command = ('/opt/exp_soft/cms/t3/t3submit_new -short -q cms \'' + outJobName +"\'") - print command - #os.system (command) diff --git a/TauTagAndProbe/test/test.py b/TauTagAndProbe/test/test.py deleted file mode 100644 index c1415f92769..00000000000 --- a/TauTagAndProbe/test/test.py +++ /dev/null @@ -1,140 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -#isMC = False -isMC = False -#is2016 = True -is2016 = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -process.load("Configuration.StandardSequences.GeometryDB_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -if isMC: - options.outputFile = 'NTuple_MC.root' -else: - options.outputFile = 'NTuple_Data.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - - -# START ELECTRON CUT BASED ID SECTION -# -# Set up everything that is needed to compute electron IDs and -# add the ValueMaps with ID decisions into the event data stream -# - -# Load tools and function definitions -from PhysicsTools.SelectorUtils.tools.vid_id_tools import * - -process.load("RecoEgamma.ElectronIdentification.ElectronMVAValueMapProducer_cfi") - - -#********************** -dataFormat = DataFormat.MiniAOD -switchOnVIDElectronIdProducer(process, dataFormat) -#********************** - -process.load("RecoEgamma.ElectronIdentification.egmGsfElectronIDs_cfi") -# overwrite a default parameter: for miniAOD, the collection name is a slimmed one -process.egmGsfElectronIDs.physicsObjectSrc = cms.InputTag('slimmedElectrons') - -from PhysicsTools.SelectorUtils.centralIDRegistry import central_id_registry -process.egmGsfElectronIDSequence = cms.Sequence(process.egmGsfElectronIDs) - -# Define which IDs we want to produce -# Each of these two example IDs contains all four standard -my_id_modules =[ -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Fall17_iso_V1_cff' -] - - -#Add them to the VID producer -for idmod in my_id_modules: - 
setupAllVIDIdsInModule(process,idmod,setupVIDElectronSelection) - - -egmMod = 'egmGsfElectronIDs' -mvaMod = 'electronMVAValueMapProducer' -regMod = 'electronRegressionValueMapProducer' -egmSeq = 'egmGsfElectronIDSequence' -setattr(process,egmMod,process.egmGsfElectronIDs.clone()) -setattr(process,mvaMod,process.electronMVAValueMapProducer.clone()) -setattr(process,regMod,process.electronRegressionValueMapProducer.clone()) -setattr(process,egmSeq,cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod))) -process.electrons = cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod)) - - - - - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '94X_dataRun2_v6' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2017F/SingleMuon/MINIAOD/17Nov2017-v1/00000/3E7C07F9-E6F1-E711-841A-0CC47A4C8E46.root' - ), - ) -else: - process.GlobalTag.globaltag = '94X_mc2017_realistic_v14' #MC 25 ns miniAODv2 - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/RunIIFall17MiniAOD/DYJetsToLL_M-50_TuneCP5_13TeV-amcatnloFXFX-pythia8/MINIAODSIM/94X_mc2017_realistic_v10-v1/00000/005DC030-D3F4-E711-889A-02163E01A62D.root' - ) - ) - -if is2016 and not isMC: - process.patTriggerUnpacker.patTriggerObjectsStandAlone = cms.InputTag("selectedPatTrigger","","RECO") - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path( - process.electrons + - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1000 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_2018.py b/TauTagAndProbe/test/test_2018.py deleted file mode 100644 index 4dde4a29dad..00000000000 --- a/TauTagAndProbe/test/test_2018.py +++ /dev/null @@ -1,141 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -isMC = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -process.load("Configuration.StandardSequences.GeometryDB_cff") -process.load("Configuration.Geometry.GeometryRecoDB_cff") - - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. 
- -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple_SingleMu.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - - - -# START ELECTRON CUT BASED ID SECTION -# -# Set up everything that is needed to compute electron IDs and -# add the ValueMaps with ID decisions into the event data stream -# - -# Load tools and function definitions -from PhysicsTools.SelectorUtils.tools.vid_id_tools import * - -process.load("RecoEgamma.ElectronIdentification.ElectronMVAValueMapProducer_cfi") - - -#********************** -dataFormat = DataFormat.MiniAOD -switchOnVIDElectronIdProducer(process, dataFormat) -#********************** - -process.load("RecoEgamma.ElectronIdentification.egmGsfElectronIDs_cfi") -# overwrite a default parameter: for miniAOD, the collection name is a slimmed one -process.egmGsfElectronIDs.physicsObjectSrc = cms.InputTag('slimmedElectrons') - -from PhysicsTools.SelectorUtils.centralIDRegistry import central_id_registry -process.egmGsfElectronIDSequence = cms.Sequence(process.egmGsfElectronIDs) - -# Define which IDs we want to produce -# Each of these two example IDs contains all four standard -my_id_modules =[ -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Fall17_iso_V1_cff', #Fall17 iso - -] - - -#Add them to the VID producer -for idmod in my_id_modules: - setupAllVIDIdsInModule(process,idmod,setupVIDElectronSelection) - - -egmMod = 'egmGsfElectronIDs' -mvaMod = 'electronMVAValueMapProducer' -regMod = 'electronRegressionValueMapProducer' -egmSeq = 'egmGsfElectronIDSequence' -setattr(process,egmMod,process.egmGsfElectronIDs.clone()) -setattr(process,mvaMod,process.electronMVAValueMapProducer.clone()) -setattr(process,regMod,process.electronRegressionValueMapProducer.clone()) -setattr(process,egmSeq,cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod))) -process.electrons = cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod)) - - - - - -if not isMC: - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '101X_dataRun2_HLT_v7' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.TagAndProbe.useMassCuts = cms.bool(False) - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2018A/SingleMuon/USER/MuTau-PromptReco-v1/000/315/366/00000/304B2974-9B4D-E811-9975-FA163E0A0217.root' - ), - ) - - - -else: - process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - ) - ) - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = 
options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - - - -process.p = cms.Path( - process.electrons + - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1000 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_SingleMu2016_TandP.py b/TauTagAndProbe/test/test_SingleMu2016_TandP.py deleted file mode 100644 index b20ffd595fa..00000000000 --- a/TauTagAndProbe/test/test_SingleMu2016_TandP.py +++ /dev/null @@ -1,169 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms - -process = cms.Process("TagAndProbe") - -isMC = False -useGenMatch = False -useCustomHLT = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.register('outputfilename', '', VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.string, 'Filename for the Outputfile') - -process.Out = cms.OutputModule("PoolOutputModule", - fileName = cms.untracked.string ("MyOutputFile.root") -) - -if not isMC: - options.outputFile = 'NTuple_SingleMu_Data_2016.root' -else: - options.outputFile = 'NTuple_SingleMu_DYMC_2016.root' -options.inputFiles = [] -options.maxEvents = 100#-999 -options.parseArguments() - -# START ELECTRON CUT BASED ID SECTION -# -# Set up everything that is needed to compute electron IDs and -# add the ValueMaps with ID decisions into the event data stream -# - -# Load tools and function definitions -from PhysicsTools.SelectorUtils.tools.vid_id_tools import * - -process.load("RecoEgamma.ElectronIdentification.ElectronMVAValueMapProducer_cfi") - - -#********************** -dataFormat = DataFormat.MiniAOD -switchOnVIDElectronIdProducer(process, dataFormat) -#********************** - -process.load("RecoEgamma.ElectronIdentification.egmGsfElectronIDs_cfi") -# overwrite a default parameter: for miniAOD, the collection name is a slimmed one -process.egmGsfElectronIDs.physicsObjectSrc = cms.InputTag('slimmedElectrons') - -from PhysicsTools.SelectorUtils.centralIDRegistry import central_id_registry -process.egmGsfElectronIDSequence = cms.Sequence(process.egmGsfElectronIDs) - -# Define which IDs we want to produce -# Each of these two example IDs contains all four 
standard -my_id_modules =[ -'RecoEgamma.ElectronIdentification.Identification.cutBasedElectronID_Spring15_25ns_V1_cff', # both 25 and 50 ns cutbased ids produced -'RecoEgamma.ElectronIdentification.Identification.cutBasedElectronID_Spring15_50ns_V1_cff', -'RecoEgamma.ElectronIdentification.Identification.heepElectronID_HEEPV60_cff', # recommended for both 50 and 25 ns -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Spring15_25ns_nonTrig_V1_cff', # will not be produced for 50 ns, triggering still to come -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Spring15_25ns_Trig_V1_cff', # 25 ns trig -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Spring15_50ns_Trig_V1_cff', # 50 ns trig -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Spring16_GeneralPurpose_V1_cff', #Spring16 -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Spring16_HZZ_V1_cff', #Spring16 HZZ - -] - - -#Add them to the VID producer -for idmod in my_id_modules: - setupAllVIDIdsInModule(process,idmod,setupVIDElectronSelection) - - -egmMod = 'egmGsfElectronIDs' -mvaMod = 'electronMVAValueMapProducer' -regMod = 'electronRegressionValueMapProducer' -egmSeq = 'egmGsfElectronIDSequence' -setattr(process,egmMod,process.egmGsfElectronIDs.clone()) -setattr(process,mvaMod,process.electronMVAValueMapProducer.clone()) -#setattr(process,regMod,process.electronRegressionValueMapProducer.clone()) -#setattr(process,egmSeq,cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod))) -#process.electrons = cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod)) -process.electrons = cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)) - - - -if not isMC: - from Configuration.AlCa.autoCond import autoCond - #process.GlobalTag.globaltag = '80X_dataRun2_2016SeptRepro_v7' #for 2016G (era B-G) - process.GlobalTag.globaltag = '80X_dataRun2_2016SeptRepro_v7' #'80X_dataRun2_Prompt_v16' #for 2016H (era H) - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff_2016') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2016H/SingleMuon/MINIAOD/03Feb2017_ver2-v1/80000/A2EB72CA-84EA-E611-95D0-001E674FB216.root' - #'/store/data/Run2016G/SingleMuon/MINIAOD/PromptReco-v1/000/278/819/00000/68409ABF-A263-E611-A259-FA163E2F90EB.root', - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v1/000/281/090/00000/14840EE8-C27F-E611-B9E4-02163E011A1B.root', - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v1/000/281/131/00000/C89845BF-9580-E611-B7BC-02163E01420B.root', - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v1/000/281/131/00000/C89845BF-9580-E611-B7BC-02163E01420B.root', - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v1/000/281/085/00000/C2ABE862-897F-E611-9BB9-FA163E6734CA.root' - ), - ) - - - -else: - process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_TrancheIV_v6' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_2016_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/RunIISummer16MiniAODv2/DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/FlatPU28to62HcalNZSRAW_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/110000/026BBCB4-6BFA-E611-B9D9-ECF4BBE15B60.root' - ) - ) - - -if useCustomHLT: - process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","MYHLT") - process.Ntuplizer.triggerSet = cms.InputTag("selectedPatTriggerCustom", "", "MYHLT") - 
process.Ntuplizer.triggerResultsLabel = cms.InputTag("TriggerResults", "", "MYHLT") - process.Ntuplizer.L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "MYHLT") - process.Ntuplizer.L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "MYHLT") - - -if isMC and not useGenMatch: - process.Ntuplizer.taus = cms.InputTag("goodTaus") - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path( - process.electrons + - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_SingleMu2018_TandP.py b/TauTagAndProbe/test/test_SingleMu2018_TandP.py deleted file mode 100644 index 2115ad1c36a..00000000000 --- a/TauTagAndProbe/test/test_SingleMu2018_TandP.py +++ /dev/null @@ -1,213 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") -import os - -isMC = False -useGenMatch = False -useCustomHLT = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -process.load("Configuration.StandardSequences.GeometryRecoDB_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. 
- -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -if not isMC: - options.outputFile = 'NTuple_SingleMu_Data_2018.root' -else: - options.outputFile = 'NTuple_SingleMu_DYMC_2018.root' - -options.inputFiles = [] -options.maxEvents = 1000 -options.parseArguments() - - -def get_cmssw_version(): - """returns 'CMSSW_X_Y_Z'""" - return os.environ["CMSSW_RELEASE_BASE"].split('/')[-1] - -def get_cmssw_version_number(): - """returns 'X_Y_Z' (without 'CMSSW_')""" - return map(int, get_cmssw_version().split("CMSSW_")[1].split("_")[0:3]) - -def versionToInt(release=9, subversion=4, patch=0): - return release * 10000 + subversion * 100 + patch - -def is_above_cmssw_version( release=10, subversion=2, patch=0): - split_cmssw_version = get_cmssw_version_number() - if versionToInt(release, subversion, patch) > versionToInt(split_cmssw_version[0], split_cmssw_version[1], split_cmssw_version[2]): - return False - else: - return True - -# START ELECTRON CUT BASED ID SECTION -# -# Set up everything that is needed to compute electron IDs and -# add the ValueMaps with ID decisions into the event data stream -# - -# Load tools and function definitions -from PhysicsTools.SelectorUtils.tools.vid_id_tools import * - -process.load("RecoEgamma.ElectronIdentification.ElectronMVAValueMapProducer_cfi") - - -#********************** -dataFormat = DataFormat.MiniAOD -switchOnVIDElectronIdProducer(process, dataFormat) -#********************** - -process.load("RecoEgamma.ElectronIdentification.egmGsfElectronIDs_cfi") -# overwrite a default parameter: for miniAOD, the collection name is a slimmed one -process.egmGsfElectronIDs.physicsObjectSrc = cms.InputTag('slimmedElectrons') - -from PhysicsTools.SelectorUtils.centralIDRegistry import central_id_registry -process.egmGsfElectronIDSequence = cms.Sequence(process.egmGsfElectronIDs) - -# Define which IDs we want to produce -# Each of these two example IDs contains all four standard -my_id_modules =[ -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Fall17_iso_V2_cff', #Fall17 iso -] - - -#Add them to the VID producer -for idmod in my_id_modules: - setupAllVIDIdsInModule(process,idmod,setupVIDElectronSelection) - - -egmMod = 'egmGsfElectronIDs' -mvaMod = 'electronMVAValueMapProducer' -setattr(process,egmMod,process.egmGsfElectronIDs.clone()) -setattr(process,mvaMod,process.electronMVAValueMapProducer.clone()) -process.electrons = cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)) - - -print "The current CMSSW version is", get_cmssw_version() - -if is_above_cmssw_version(10, 1, 9): - print "=== An electron MVA variable helper is added to the sequence for the electron ID ===" - helpMod = 'electronMVAVariableHelper' - setattr(process,helpMod,process.electronMVAVariableHelper.clone()) - process.electrons = cms.Sequence(getattr(process,helpMod)*getattr(process,mvaMod)*getattr(process,egmMod)) - - -# ================================= -# START TAU MVA BASED ID SECTION -- NEW WAY -- -# ================================= - -from TauTagAndProbe.TauTagAndProbe.runTauIdMVA import TauIDEmbedder - -toKeep = [] 
-toKeep.extend(("2017v1","2017v2","newDM2017v2", "dR0p32017v2")) - -na = TauIDEmbedder(process, cms, - debug=True, - toKeep = toKeep -) -na.runTauID() - - -if not isMC: - from Configuration.AlCa.autoCond import autoCond - #process.GlobalTag.globaltag = '102X_dataRun2_Sep2018Rereco_v1' # for 2018 ReReco RunABC samples - process.GlobalTag.globaltag = '102X_dataRun2_Sep2018ABC_v2' # for 2018 ReReco RunABC samples - #process.GlobalTag.globaltag = '102X_dataRun2_Prompt_v13' # for 2018 PromtReco RunD samples - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - # '/store/data/Run2017E/SingleMuon/MINIAOD/17Nov2017-v1/50000/000DCB8B-2ADD-E711-9100-008CFAF35AC0.root' - # '/store/data/Run2017B/SingleMuon/MINIAOD/31Mar2018-v1/100000/001642F1-6638-E811-B4FA-0025905B857A.root' - #'/store/data/Run2018A/SingleMuon/MINIAOD/06Jun2018-v1/410000/F2A0DDD5-FF83-E811-A183-FA163EFE9CA3.root' - '/store/data/Run2018D/SingleMuon/MINIAOD/PromptReco-v2/000/321/988/00000/6AB32549-34AF-E811-A47F-FA163EBD19B5.root' - ), - ) - - -else: - process.GlobalTag.globaltag = '102X_upgrade2018_realistic_v18' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - # '/store/mc/RunIIFall17MiniAOD/DY1JetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/94X_mc2017_realistic_v10-v1/20000/12DB4A06-65D7-E711-8DA4-0CC47A78A456.root' - #'/store/mc/RunIIFall17MiniAODv2/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017RECOSIMstep_12Apr2018_94X_mc2017_realistic_v14-v1/20000/00D13F2E-6F44-E811-923E-001E0BED0560.root' - #' /store/mc/RunIIAutumn18MiniAOD/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/102X_upgrade2018_realistic_v15-v1/80000/52E26FAC-370D-0742-96AF-3E5439F7CD9C.root' - '/store/mc/RunIIAutumn18MiniAOD/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/102X_upgrade2018_realistic_v15-v1/80000/FFDCFC59-4ABE-0646-AABE-BD5D65301169.root', - '/store/mc/RunIIAutumn18MiniAOD/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/102X_upgrade2018_realistic_v15-v1/80000/FC40474D-EE5A-C845-AEC3-E9B088F15648.root', - '/store/mc/RunIIAutumn18MiniAOD/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/102X_upgrade2018_realistic_v15-v1/80000/FA870652-56D3-1E45-B1EE-EF8A8FC89506.root', - '/store/mc/RunIIAutumn18MiniAOD/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/102X_upgrade2018_realistic_v15-v1/80000/FA553171-E5E9-5346-B634-6A64D6DEB64C.root', - '/store/mc/RunIIAutumn18MiniAOD/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/102X_upgrade2018_realistic_v15-v1/80000/FA553171-E5E9-5346-B634-6A64D6DEB64C.root' - ) - ) - - -if useCustomHLT: - process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","MYHLT") - process.Ntuplizer.triggerSet = cms.InputTag("selectedPatTriggerCustom", "", "MYHLT") - process.Ntuplizer.triggerResultsLabel = cms.InputTag("TriggerResults", "", "MYHLT") - process.Ntuplizer.L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "MYHLT") - process.Ntuplizer.L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "MYHLT") - - -if isMC and useGenMatch: - process.Ntuplizer.taus = cms.InputTag("genMatchedTaus") - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if 
options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - - -process.p = cms.Path( - process.electrons + - process.rerunMvaIsolationSequence + - getattr(process, "NewTauIDsEmbedded") + - process.TAndPseq + - process.NtupleSeq -) - -if isMC and useGenMatch: - process.p = cms.Path( - process.electrons + - process.rerunMvaIsolationSequence + - getattr(process, "NewTauIDsEmbedded") + - process.TAndPseq + - process.genMatchedSeq + - process.NtupleSeq - ) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_SingleMu297050.py b/TauTagAndProbe/test/test_SingleMu297050.py deleted file mode 100644 index 4dde4a29dad..00000000000 --- a/TauTagAndProbe/test/test_SingleMu297050.py +++ /dev/null @@ -1,141 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -isMC = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -process.load("Configuration.StandardSequences.GeometryDB_cff") -process.load("Configuration.Geometry.GeometryRecoDB_cff") - - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. 
- -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple_SingleMu.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - - - -# START ELECTRON CUT BASED ID SECTION -# -# Set up everything that is needed to compute electron IDs and -# add the ValueMaps with ID decisions into the event data stream -# - -# Load tools and function definitions -from PhysicsTools.SelectorUtils.tools.vid_id_tools import * - -process.load("RecoEgamma.ElectronIdentification.ElectronMVAValueMapProducer_cfi") - - -#********************** -dataFormat = DataFormat.MiniAOD -switchOnVIDElectronIdProducer(process, dataFormat) -#********************** - -process.load("RecoEgamma.ElectronIdentification.egmGsfElectronIDs_cfi") -# overwrite a default parameter: for miniAOD, the collection name is a slimmed one -process.egmGsfElectronIDs.physicsObjectSrc = cms.InputTag('slimmedElectrons') - -from PhysicsTools.SelectorUtils.centralIDRegistry import central_id_registry -process.egmGsfElectronIDSequence = cms.Sequence(process.egmGsfElectronIDs) - -# Define which IDs we want to produce -# Each of these two example IDs contains all four standard -my_id_modules =[ -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Fall17_iso_V1_cff', #Fall17 iso - -] - - -#Add them to the VID producer -for idmod in my_id_modules: - setupAllVIDIdsInModule(process,idmod,setupVIDElectronSelection) - - -egmMod = 'egmGsfElectronIDs' -mvaMod = 'electronMVAValueMapProducer' -regMod = 'electronRegressionValueMapProducer' -egmSeq = 'egmGsfElectronIDSequence' -setattr(process,egmMod,process.egmGsfElectronIDs.clone()) -setattr(process,mvaMod,process.electronMVAValueMapProducer.clone()) -setattr(process,regMod,process.electronRegressionValueMapProducer.clone()) -setattr(process,egmSeq,cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod))) -process.electrons = cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)*getattr(process,regMod)) - - - - - -if not isMC: - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '101X_dataRun2_HLT_v7' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.TagAndProbe.useMassCuts = cms.bool(False) - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2018A/SingleMuon/USER/MuTau-PromptReco-v1/000/315/366/00000/304B2974-9B4D-E811-9975-FA163E0A0217.root' - ), - ) - - - -else: - process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - ) - ) - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = 
options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - - - -process.p = cms.Path( - process.electrons + - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1000 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_SingleMu297050_TandP.py b/TauTagAndProbe/test/test_SingleMu297050_TandP.py deleted file mode 100644 index b9f3c724373..00000000000 --- a/TauTagAndProbe/test/test_SingleMu297050_TandP.py +++ /dev/null @@ -1,177 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -isMC = True -useGenMatch = False -useCustomHLT = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") -process.load("Configuration.StandardSequences.GeometryRecoDB_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -if not isMC: - options.outputFile = 'NTuple_SingleMu_Data_2017.root' -else: - options.outputFile = 'NTuple_SingleMu_DYMC_2017.root' - -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - - -# START ELECTRON CUT BASED ID SECTION -# -# Set up everything that is needed to compute electron IDs and -# add the ValueMaps with ID decisions into the event data stream -# - -# Load tools and function definitions -from PhysicsTools.SelectorUtils.tools.vid_id_tools import * - -process.load("RecoEgamma.ElectronIdentification.ElectronMVAValueMapProducer_cfi") - - -#********************** -dataFormat = DataFormat.MiniAOD -switchOnVIDElectronIdProducer(process, dataFormat) -#********************** - -process.load("RecoEgamma.ElectronIdentification.egmGsfElectronIDs_cfi") -# overwrite a default parameter: for miniAOD, the collection name is a slimmed one -process.egmGsfElectronIDs.physicsObjectSrc = cms.InputTag('slimmedElectrons') - -from PhysicsTools.SelectorUtils.centralIDRegistry import central_id_registry -process.egmGsfElectronIDSequence = cms.Sequence(process.egmGsfElectronIDs) - -# Define which IDs we want to produce -# Each of these two example IDs contains all four standard -my_id_modules =[ -'RecoEgamma.ElectronIdentification.Identification.mvaElectronID_Fall17_iso_V1_cff', #Fall17 iso -] - - -#Add them to the VID producer -for idmod in my_id_modules: - 
setupAllVIDIdsInModule(process,idmod,setupVIDElectronSelection) - - -egmMod = 'egmGsfElectronIDs' -mvaMod = 'electronMVAValueMapProducer' -setattr(process,egmMod,process.egmGsfElectronIDs.clone()) -setattr(process,mvaMod,process.electronMVAValueMapProducer.clone()) -process.electrons = cms.Sequence(getattr(process,mvaMod)*getattr(process,egmMod)) - - -# ================================= -# START TAU MVA BASED ID SECTION -- NEW WAY -- -# ================================= - -from TauTagAndProbe.TauTagAndProbe.runTauIdMVA import TauIDEmbedder - -toKeep = [] -toKeep.extend(("2017v1","2017v2","newDM2017v2", "dR0p32017v2")) - -na = TauIDEmbedder(process, cms, - debug=True, - toKeep = toKeep -) -na.runTauID() - - -if not isMC: - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '94X_dataRun2_v6' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_2017_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - # '/store/data/Run2017E/SingleMuon/MINIAOD/17Nov2017-v1/50000/000DCB8B-2ADD-E711-9100-008CFAF35AC0.root' - '/store/data/Run2017B/SingleMuon/MINIAOD/31Mar2018-v1/100000/001642F1-6638-E811-B4FA-0025905B857A.root' - ), - ) - - - -else: - process.GlobalTag.globaltag = '94X_mc2017_realistic_v14' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_2017_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - # '/store/mc/RunIIFall17MiniAOD/DY1JetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/94X_mc2017_realistic_v10-v1/20000/12DB4A06-65D7-E711-8DA4-0CC47A78A456.root' - '/store/mc/RunIIFall17MiniAODv2/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017RECOSIMstep_12Apr2018_94X_mc2017_realistic_v14-v1/20000/00D13F2E-6F44-E811-923E-001E0BED0560.root' - - ) - ) - - -if useCustomHLT: - process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","MYHLT") - process.Ntuplizer.triggerSet = cms.InputTag("selectedPatTriggerCustom", "", "MYHLT") - process.Ntuplizer.triggerResultsLabel = cms.InputTag("TriggerResults", "", "MYHLT") - process.Ntuplizer.L2CaloJet_ForIsoPix_Collection = cms.InputTag("hltL2TausForPixelIsolation", "", "MYHLT") - process.Ntuplizer.L2CaloJet_ForIsoPix_IsoCollection = cms.InputTag("hltL2TauPixelIsoTagProducer", "", "MYHLT") - - -if isMC and useGenMatch: - process.Ntuplizer.taus = cms.InputTag("genMatchedTaus") - - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - - -process.p = cms.Path( - process.electrons + - process.rerunMvaIsolationSequence + - getattr(process, "NewTauIDsEmbedded") + - process.TAndPseq + - process.NtupleSeq -) - -if isMC and useGenMatch: - process.p = cms.Path( - process.electrons + - process.rerunMvaIsolationSequence + - getattr(process, "NewTauIDsEmbedded") + - process.TAndPseq + - process.genMatchedSeq + - process.NtupleSeq - ) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery 
= 1 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_data.py b/TauTagAndProbe/test/test_data.py deleted file mode 100644 index f0c9b93132f..00000000000 --- a/TauTagAndProbe/test/test_data.py +++ /dev/null @@ -1,110 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -#isMC = True -isMC = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple_data.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/DE499C8E-1B8B-E611-8C93-02163E014207.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/ACA10D13-2D8B-E611-820E-FA163E8FD709.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/20AE9A37-2D8B-E611-8405-02163E0119B8.root' - #/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/1E61B437-358B-E611-91C5-02163E011AEE.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/BC5DD41A-2E8B-E611-B4EF-02163E012B59.root' - #'/store/mc/RunIISpring16MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/40000/00200284-F15C-E611-AA9B-002590574776.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/082EC2A0-4C28-E611-BC61-02163E014412.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/1014078C-4C28-E611-85FB-02163E0141C1.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/203E5176-4C28-E611-B4F8-02163E014743.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/32508866-4C28-E611-A38D-02163E011BAF.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/44AF1068-4C28-E611-80D0-02163E01367B.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/5AF4B08A-4C28-E611-AEC9-02163E01342C.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/6E3FD070-4C28-E611-9A1E-02163E011DC7.root', - - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - 
#'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7C2CB76B-4C28-E611-8D90-02163E01467F.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/86B68469-4C28-E611-92A6-02163E01419C.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/275/125/00000/24FC42B2-8036-E611-B42D-02163E012BD1.root' - ), - #eventsToProcess = cms.untracked.VEventRange('282092:1057805498') - ) -else: - process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - - '/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'file:B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'/store/mc/RunIIFall15MiniAODv2/GluGluToRadionToHHTo2B2Tau_M-700_narrow_13TeV-madgraph/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/60000/2CD9692A-9EB8-E511-A944-FACADE0000C9.root' - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/02A85EE9-70BA-E511-A0A2-0CC47A4D7678.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08C274E5-70BA-E511-920F-0CC47A78A458.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/06E9C8D6-79BA-E511-9EB0-0CC47A4D7600.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08B579D9-C7B8-E511-861F-00259021A526.root' - - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/065837E2-DA38-E611-9157-008CFA50291C.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0C6A2B22-DA38-E611-AA0C-842B2B2AB616.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0EB9BEA7-D938-E611-85DD-0242AC130003.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/12DC7ABE-DA38-E611-9BA2-0242AC130005.root' - - ) - ) - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(-1) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path( - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") 
-process.MessageLogger.cerr.FwkReport.reportEvery = 1000 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_noTagAndProbe.py b/TauTagAndProbe/test/test_noTagAndProbe.py deleted file mode 100644 index f4ed54301df..00000000000 --- a/TauTagAndProbe/test/test_noTagAndProbe.py +++ /dev/null @@ -1,112 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -isMC = True -#isMC = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/DE499C8E-1B8B-E611-8C93-02163E014207.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/ACA10D13-2D8B-E611-820E-FA163E8FD709.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/20AE9A37-2D8B-E611-8405-02163E0119B8.root' - #/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/1E61B437-358B-E611-91C5-02163E011AEE.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/BC5DD41A-2E8B-E611-B4EF-02163E012B59.root' - #'/store/mc/RunIISpring16MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/40000/00200284-F15C-E611-AA9B-002590574776.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/082EC2A0-4C28-E611-BC61-02163E014412.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/1014078C-4C28-E611-85FB-02163E0141C1.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/203E5176-4C28-E611-B4F8-02163E014743.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/32508866-4C28-E611-A38D-02163E011BAF.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/44AF1068-4C28-E611-80D0-02163E01367B.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/5AF4B08A-4C28-E611-AEC9-02163E01342C.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/6E3FD070-4C28-E611-9A1E-02163E011DC7.root', - - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - 
#'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7C2CB76B-4C28-E611-8D90-02163E01467F.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/86B68469-4C28-E611-92A6-02163E01419C.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/275/125/00000/24FC42B2-8036-E611-B42D-02163E012BD1.root' - ), - #eventsToProcess = cms.untracked.VEventRange('282092:1057805498') - ) -else: - process.GlobalTag.globaltag = '92X_upgrade2017_TSG_For90XSamples_V2' #MC 25 ns miniAODv2 - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_noTagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/PhaseIFall16MiniAOD/VBFHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU28to62HcalNZSRAW_PhaseIFall16_90X_upgrade2017_realistic_v6_C1-v1/00000/182AC7D1-661B-E711-BA96-0242AC130006.root' - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'file:B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'/store/mc/RunIIFall15MiniAODv2/GluGluToRadionToHHTo2B2Tau_M-700_narrow_13TeV-madgraph/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/60000/2CD9692A-9EB8-E511-A944-FACADE0000C9.root' - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/02A85EE9-70BA-E511-A0A2-0CC47A4D7678.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08C274E5-70BA-E511-920F-0CC47A78A458.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/06E9C8D6-79BA-E511-9EB0-0CC47A4D7600.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08B579D9-C7B8-E511-861F-00259021A526.root' - - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/065837E2-DA38-E611-9157-008CFA50291C.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0C6A2B22-DA38-E611-AA0C-842B2B2AB616.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0EB9BEA7-D938-E611-85DD-0242AC130003.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/12DC7ABE-DA38-E611-9BA2-0242AC130005.root' - - ) - ) - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - 
-process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(10000) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path( - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1000 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_noTagAndProbe_AOD.py b/TauTagAndProbe/test/test_noTagAndProbe_AOD.py deleted file mode 100644 index 7b30fbc00f6..00000000000 --- a/TauTagAndProbe/test/test_noTagAndProbe_AOD.py +++ /dev/null @@ -1,111 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -isMC = True -#isMC = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/DE499C8E-1B8B-E611-8C93-02163E014207.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/ACA10D13-2D8B-E611-820E-FA163E8FD709.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/20AE9A37-2D8B-E611-8405-02163E0119B8.root' - #/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/1E61B437-358B-E611-91C5-02163E011AEE.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/BC5DD41A-2E8B-E611-B4EF-02163E012B59.root' - #'/store/mc/RunIISpring16MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/40000/00200284-F15C-E611-AA9B-002590574776.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/082EC2A0-4C28-E611-BC61-02163E014412.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/1014078C-4C28-E611-85FB-02163E0141C1.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/203E5176-4C28-E611-B4F8-02163E014743.root', - # 
'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/32508866-4C28-E611-A38D-02163E011BAF.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/44AF1068-4C28-E611-80D0-02163E01367B.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/5AF4B08A-4C28-E611-AEC9-02163E01342C.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/6E3FD070-4C28-E611-9A1E-02163E011DC7.root', - - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7C2CB76B-4C28-E611-8D90-02163E01467F.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/86B68469-4C28-E611-92A6-02163E01419C.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/275/125/00000/24FC42B2-8036-E611-B42D-02163E012BD1.root' - ), - #eventsToProcess = cms.untracked.VEventRange('282092:1057805498') - ) -else: - process.GlobalTag.globaltag = '92X_upgrade2017_TSG_For90XSamples_V1' - #process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_noTagAndProbe_AOD_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/PhaseISpring17DR/VBFHToTauTau_M125_13TeV_powheg_pythia8/AODSIM/FlatPU28to62HcalNZSRAW_HIG07_90X_upgrade2017_realistic_v20-v1/100000/0020BB60-C22C-E711-AFD5-00266CFEFE08.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'file:B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'/store/mc/RunIIFall15MiniAODv2/GluGluToRadionToHHTo2B2Tau_M-700_narrow_13TeV-madgraph/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/60000/2CD9692A-9EB8-E511-A944-FACADE0000C9.root' - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/02A85EE9-70BA-E511-A0A2-0CC47A4D7678.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08C274E5-70BA-E511-920F-0CC47A78A458.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/06E9C8D6-79BA-E511-9EB0-0CC47A4D7600.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08B579D9-C7B8-E511-861F-00259021A526.root' - - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/065837E2-DA38-E611-9157-008CFA50291C.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0C6A2B22-DA38-E611-AA0C-842B2B2AB616.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0EB9BEA7-D938-E611-85DD-0242AC130003.root', - 
#'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/12DC7ABE-DA38-E611-9BA2-0242AC130005.root' - - ) - ) - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(10000) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path( - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 10 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_noTagAndProbe_multipleTaus.py b/TauTagAndProbe/test/test_noTagAndProbe_multipleTaus.py deleted file mode 100644 index b50cb10c1f8..00000000000 --- a/TauTagAndProbe/test/test_noTagAndProbe_multipleTaus.py +++ /dev/null @@ -1,110 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -isMC = True -#isMC = False - -process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. 
- -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/DE499C8E-1B8B-E611-8C93-02163E014207.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/ACA10D13-2D8B-E611-820E-FA163E8FD709.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/20AE9A37-2D8B-E611-8405-02163E0119B8.root' - #/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/1E61B437-358B-E611-91C5-02163E011AEE.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/BC5DD41A-2E8B-E611-B4EF-02163E012B59.root' - #'/store/mc/RunIISpring16MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/40000/00200284-F15C-E611-AA9B-002590574776.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/082EC2A0-4C28-E611-BC61-02163E014412.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/1014078C-4C28-E611-85FB-02163E0141C1.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/203E5176-4C28-E611-B4F8-02163E014743.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/32508866-4C28-E611-A38D-02163E011BAF.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/44AF1068-4C28-E611-80D0-02163E01367B.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/5AF4B08A-4C28-E611-AEC9-02163E01342C.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/6E3FD070-4C28-E611-9A1E-02163E011DC7.root', - - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7C2CB76B-4C28-E611-8D90-02163E01467F.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/86B68469-4C28-E611-92A6-02163E01419C.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/275/125/00000/24FC42B2-8036-E611-B42D-02163E012BD1.root' - ), - #eventsToProcess = cms.untracked.VEventRange('282092:1057805498') - ) -else: - process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_noTagAndProbe_multipleTaus_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - - 
'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'file:B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'/store/mc/RunIIFall15MiniAODv2/GluGluToRadionToHHTo2B2Tau_M-700_narrow_13TeV-madgraph/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/60000/2CD9692A-9EB8-E511-A944-FACADE0000C9.root' - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/02A85EE9-70BA-E511-A0A2-0CC47A4D7678.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08C274E5-70BA-E511-920F-0CC47A78A458.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/06E9C8D6-79BA-E511-9EB0-0CC47A4D7600.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08B579D9-C7B8-E511-861F-00259021A526.root' - - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/065837E2-DA38-E611-9157-008CFA50291C.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0C6A2B22-DA38-E611-AA0C-842B2B2AB616.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0EB9BEA7-D938-E611-85DD-0242AC130003.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/12DC7ABE-DA38-E611-9BA2-0242AC130005.root' - - ) - ) - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(10000) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path( - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 1000 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTagAndProbe/test/test_noTagAndProbe_multipleTaus_AOD.py b/TauTagAndProbe/test/test_noTagAndProbe_multipleTaus_AOD.py deleted file mode 100644 index d5f548cc2fb..00000000000 --- a/TauTagAndProbe/test/test_noTagAndProbe_multipleTaus_AOD.py +++ /dev/null @@ -1,110 +0,0 @@ -import FWCore.ParameterSet.VarParsing as VarParsing -import FWCore.PythonUtilities.LumiList as LumiList -import FWCore.ParameterSet.Config as cms -process = cms.Process("TagAndProbe") - -isMC = True -#isMC = False - 
-process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") - -#### handling of cms line options for tier3 submission -#### the following are dummy defaults, so that one can normally use the config changing file list by hand etc. - -options = VarParsing.VarParsing ('analysis') -options.register ('skipEvents', - -1, # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.int, # string, int, or float - "Number of events to skip") -options.register ('JSONfile', - "", # default value - VarParsing.VarParsing.multiplicity.singleton, # singleton or list - VarParsing.VarParsing.varType.string, # string, int, or float - "JSON file (empty for no JSON)") -options.outputFile = 'NTuple.root' -options.inputFiles = [] -options.maxEvents = -999 -options.parseArguments() - -if not isMC: # will use 80X - from Configuration.AlCa.autoCond import autoCond - process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8' - process.load('TauTagAndProbe.TauTagAndProbe.tagAndProbe_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/DE499C8E-1B8B-E611-8C93-02163E014207.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/ACA10D13-2D8B-E611-820E-FA163E8FD709.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/20AE9A37-2D8B-E611-8405-02163E0119B8.root' - #/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/1E61B437-358B-E611-91C5-02163E011AEE.root' - #'/store/data/Run2016H/SingleMuon/MINIAOD/PromptReco-v2/000/282/092/00000/BC5DD41A-2E8B-E611-B4EF-02163E012B59.root' - #'/store/mc/RunIISpring16MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/40000/00200284-F15C-E611-AA9B-002590574776.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/082EC2A0-4C28-E611-BC61-02163E014412.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/1014078C-4C28-E611-85FB-02163E0141C1.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/203E5176-4C28-E611-B4F8-02163E014743.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/32508866-4C28-E611-A38D-02163E011BAF.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/44AF1068-4C28-E611-80D0-02163E01367B.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/5AF4B08A-4C28-E611-AEC9-02163E01342C.root', - # '/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/6E3FD070-4C28-E611-9A1E-02163E011DC7.root', - - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7005DB70-4C28-E611-8628-02163E0144DD.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/7C2CB76B-4C28-E611-8D90-02163E01467F.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/274/199/00000/86B68469-4C28-E611-92A6-02163E01419C.root', - #'/store/data/Run2016B/SingleMuon/MINIAOD/PromptReco-v2/000/275/125/00000/24FC42B2-8036-E611-B42D-02163E012BD1.root' - ), - #eventsToProcess = cms.untracked.VEventRange('282092:1057805498') - ) -else: - process.GlobalTag.globaltag = '92X_upgrade2017_TSG_For90XSamples_V1' #MC 25 ns miniAODv2 - # process.GlobalTag.globaltag = '76X_dataRun2_16Dec2015_v0' - 
process.load('TauTagAndProbe.TauTagAndProbe.MCanalysis_noTagAndProbe_multipleTaus_AOD_cff') - process.source = cms.Source("PoolSource", - fileNames = cms.untracked.vstring( - '/store/mc/PhaseISpring17DR/VBFHToTauTau_M125_13TeV_powheg_pythia8/AODSIM/FlatPU28to62HcalNZSRAW_HIG07_90X_upgrade2017_realistic_v20-v1/100000/0020BB60-C22C-E711-AFD5-00266CFEFE08.root' - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/FlatPU20to70HcalNZSRAW_withHLT_80X_mcRun2_asymptotic_v14-v1/50000/B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'file:B0D22F36-9567-E611-A5FB-0CC47A4DEE76.root' - #'/store/mc/RunIIFall15MiniAODv2/GluGluToRadionToHHTo2B2Tau_M-700_narrow_13TeV-madgraph/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/60000/2CD9692A-9EB8-E511-A944-FACADE0000C9.root' - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/02A85EE9-70BA-E511-A0A2-0CC47A4D7678.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08C274E5-70BA-E511-920F-0CC47A78A458.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/06E9C8D6-79BA-E511-9EB0-0CC47A4D7600.root', - #'/store/mc/RunIIFall15MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/70000/08B579D9-C7B8-E511-861F-00259021A526.root' - - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/065837E2-DA38-E611-9157-008CFA50291C.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0C6A2B22-DA38-E611-AA0C-842B2B2AB616.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/0EB9BEA7-D938-E611-85DD-0242AC130003.root', - #'/store/mc/RunIISpring16MiniAODv2/GluGluHToTauTau_M125_13TeV_powheg_pythia8/MINIAODSIM/PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/80000/12DC7ABE-DA38-E611-9BA2-0242AC130005.root' - - ) - ) - -if options.JSONfile: - print "Using JSON: " , options.JSONfile - process.source.lumisToProcess = LumiList.LumiList(filename = options.JSONfile).getVLuminosityBlockRange() - -if options.inputFiles: - process.source.fileNames = cms.untracked.vstring(options.inputFiles) - -process.maxEvents = cms.untracked.PSet( - input = cms.untracked.int32(10000) -) - -if options.maxEvents >= -1: - process.maxEvents.input = cms.untracked.int32(options.maxEvents) -if options.skipEvents >= 0: - process.source.skipEvents = cms.untracked.uint32(options.skipEvents) - -process.options = cms.untracked.PSet( - wantSummary = cms.untracked.bool(True) -) - -process.p = cms.Path( - process.TAndPseq + - process.NtupleSeq -) - -# Silence output -process.load("FWCore.MessageService.MessageLogger_cfi") -process.MessageLogger.cerr.FwkReport.reportEvery = 10 - -# Adding ntuplizer -process.TFileService=cms.Service('TFileService',fileName=cms.string(options.outputFile)) diff --git a/TauTriggerFitTool/python/binning2018.py b/TauTriggerFitTool/python/binning2018.py index f953b606b4e..0a342e99452 100644 --- a/TauTriggerFitTool/python/binning2018.py +++ 
b/TauTriggerFitTool/python/binning2018.py @@ -1,5 +1,6 @@ from array import array from math import sqrt +import numpy as np class getbinning2018: @@ -46,6 +47,16 @@ def getBinning(self): edgesETau.append( float(i) ) for i in range(160, 600, 350 ) : edgesETau.append( float(i) ) + + #ref_bins = np.concatenate((np.arange(20, 100, 1), np.arange(100, 200, 5), np.arange(200, 301, 10))) + ref_bins = np.concatenate((np.arange(20, 50, 1), np.arange(50, 100, 2), np.arange(100, 150, 5), + np.arange(150, 200, 10), np.arange(200, 301, 20))) + edgesdiTau = ref_bins.tolist() + edgesMuTau = ref_bins.tolist() + edgesETau = ref_bins.tolist() + #edgesdiTau = np.arange(20, 501, 1).tolist() + #edgesMuTau = np.arange(20, 501, 1).tolist() + #edgesETau = np.arange(20, 501, 1).tolist() edgesdict ={"ditau":edgesdiTau, "mutau":edgesMuTau, "etau":edgesETau} return edgesdict diff --git a/TauTriggerFitTool/python/produceTriggerEfficiencies.py b/TauTriggerFitTool/python/produceTriggerEfficiencies.py index c3ac2b24a3e..21c28324dc1 100644 --- a/TauTriggerFitTool/python/produceTriggerEfficiencies.py +++ b/TauTriggerFitTool/python/produceTriggerEfficiencies.py @@ -2,9 +2,10 @@ from array import array gROOT.SetBatch(True) from math import sqrt -from functions import * +from functions import * from binning2017 import * from binning2018 import * +import numpy as np #choose which year do you want to run it: Samples2016 = False @@ -27,7 +28,7 @@ files = files2018 else: "Please select only one of the year!" - + gStyle.SetFrameLineWidth(1) gStyle.SetPadBottomMargin(0.13) gStyle.SetPadLeftMargin(0.15) @@ -56,9 +57,19 @@ bins = getbinning2017() elif(Samples2018): bins = getbinning2018() - -bin = bins.getBinning() -binDM = bins.getBinningDM() + +#bin = bins.getBinning() +#binDM = bins.getBinningDM() +bin_ref = np.arange(20, 501, 1).tolist() +bin = {} +binDM = {} +for trig in triggers: + bin[trig] = bin_ref + binDM[trig] = {} + for dm in tauDMs: + binDM[trig][dm] = {} + for sample_type in types: + binDM[trig][dm][sample_type] = bin_ref #binDM = bins.getBinningPerDM() hPtDen = [[],[],[]] @@ -68,7 +79,7 @@ hPtNumDM = [[],[],[],[]] for ipath, trigger in enumerate(triggers): - + print "bin[", trigger, "]", bin[trigger] hPtNum.append([]) hPtDen.append([]) @@ -87,60 +98,54 @@ histoname = "histo_" + typ + "_" + wp + "_" + trigger hPtNumDM[ipath][index].append([]) hPtDenDM[ipath][index].append([]) - + hPtDen[ipath][index].append(TH1F (histoname + "_Den", "", len(bin[trigger])-1, array('f',bin[trigger]))) hPtNum[ipath][index].append(TH1F (histoname + "_Num", "", len(bin[trigger])-1, array('f',bin[trigger]))) # per DM for idm, DM in enumerate(tauDMs): - if(Samples2017): - hPtDenDM[ipath][index][ind].append(TH1F (histoname + "_Den_" + DM, "", len(binDM[trigger][DM][typ])-1, array('f',binDM[trigger][DM][typ]))) - hPtNumDM[ipath][index][ind].append(TH1F (histoname + "_Num_" + DM, "", len(binDM[trigger][DM][typ])-1, array('f',binDM[trigger][DM][typ]))) - elif(Samples2018): - hPtDenDM[ipath][index][ind].append(TH1F (histoname + "_Den_" + DM, "", len(binDM[trigger][DM][typ])-1, array('f',binDM[trigger][DM][typ]))) - hPtNumDM[ipath][index][ind].append(TH1F (histoname + "_Num_" + DM, "", len(binDM[trigger][DM][typ])-1, array('f',binDM[trigger][DM][typ]))) - #hPtDenDM[ipath][index][ind].append(TH1F (histoname + "_Den_" + DM, "", len(binDM[trigger])-1, array('f',binDM[trigger]))) - #hPtNumDM[ipath][index][ind].append(TH1F (histoname + "_Num_" + DM, "", len(binDM[trigger])-1, array('f',binDM[trigger]))) - + hPtDenDM[ipath][index][ind].append(TH1F 
(histoname + "_Den_" + DM, "", len(binDM[trigger][DM][typ])-1, array('f',binDM[trigger][DM][typ]))) + hPtNumDM[ipath][index][ind].append(TH1F (histoname + "_Num_" + DM, "", len(binDM[trigger][DM][typ])-1, array('f',binDM[trigger][DM][typ]))) + for index, filename in enumerate(files): print "filename", filename - + file = TFile.Open(filename) tree = file.Get('TagAndProbe') - triggerNamesTree = file.Get("triggerNames") - + triggerNamesTree = file.Get("triggerNames") + print "Populating histograms" Nevts = 0 for iEv in range (0, tree.GetEntries()): tree.GetEntry(iEv) - + tauPt = tree.tauPt HLTPt = tree.hltPt tauEta = tree.tauEta tauPhi = tree.tauPhi tauDM = tree.tauDM Nvtx = tree.Nvtx - + # tau Energy Shift (SF) is only applied for 2017 for now! if("DYJets" in filename): puweight = tree.puweight if (Samples2017): tauPt_ESshifted = tree.tauPt_ESshifted - else: + else: tauPt_ESshifted = tree.tauPt else: puweight=1 tauPt_ESshifted = tree.tauPt - vvlooseWP = tree.byVVLooseIsolationMVArun2017v2DBoldDMwLT2017 - vlooseWP = tree.byVLooseIsolationMVArun2017v2DBoldDMwLT2017 - looseWP = tree.byLooseIsolationMVArun2017v2DBoldDMwLT2017 - mediumWP = tree.byMediumIsolationMVArun2017v2DBoldDMwLT2017 + vvlooseWP = tree.byVVLooseIsolationMVArun2017v2DBoldDMwLT2017 + vlooseWP = tree.byVLooseIsolationMVArun2017v2DBoldDMwLT2017 + looseWP = tree.byLooseIsolationMVArun2017v2DBoldDMwLT2017 + mediumWP = tree.byMediumIsolationMVArun2017v2DBoldDMwLT2017 tightWP = tree.byTightIsolationMVArun2017v2DBoldDMwLT2017 - vtightWP = tree.byVTightIsolationMVArun2017v2DBoldDMwLT2017 - vvtightWP = tree.byVVTightIsolationMVArun2017v2DBoldDMwLT2017 - + vtightWP = tree.byVTightIsolationMVArun2017v2DBoldDMwLT2017 + vvtightWP = tree.byVVTightIsolationMVArun2017v2DBoldDMwLT2017 + if("Run2016BtoH" in filename or "190306" in filename): hasHLTditauPath_3or4 = tree.hasHLTditauPath_3or4 hasHLTetauPath_0and1 = tree.hasHLTetauPath_0and1 @@ -159,10 +164,10 @@ Nevents = tree.EventNumber Nevts =Nevts + 1 - + #bkgSubW = 1. if tree.isOS else -1. 
weight = tree.bkgSubW*puweight - + if("Run2018A" in filename or "Autumn18" in filename): HLTHPSpaths18 = [hasHLTditauPath_15or20HPS, hasHLTmutauPath_14HPS , hasHLTetauPath_14HPS ] HLTpaths18 = [hasHLTditauPath_4or5or6noHPS, hasHLTmutauPath_8noHPS, hasHLTetauPath_8noHPS] @@ -172,7 +177,7 @@ HLTpaths16 = [hasHLTditauPath_3or4, hasHLTmutauPath_1 , hasHLTetauPath_0and1 ] WPoints = [vvlooseWP, vlooseWP, looseWP, mediumWP, tightWP, vtightWP, vvtightWP] - + oneProng = False oneProngPiZero= False threeProng = False @@ -183,7 +188,7 @@ oneProngPiZero = True if (tauDM ==10): threeProng = True - + DMs = [oneProng, oneProngPiZero, threeProng] # Filling the histograms @@ -324,17 +329,19 @@ for WPind, wp in enumerate(WPs): for index, typ in enumerate(types): - + g_efficiency =TGraphAsymmErrors() g_efficiency.BayesDivide(hPtNum[ipath][index][WPind],hPtDen[ipath][index][WPind]) - funct = functions(g_efficiency, trigger + "Efficiency_" + wp +"_"+ typ, 0, 0, 0, 0, 0, 0, 0) - h_efficiency = funct.getTH1FfromTGraphAsymmErrors() - + funct = functions(g_efficiency, trigger + "Efficiency_" + wp +"_"+ typ, 0, 0, 0, 0, 0, 0, 0) + h_efficiency = funct.getTH1FfromTGraphAsymmErrors() + # write the histograms/graphs into the output ROOT file before the fit g_efficiency.Write(trigger +"_gEfficiency_" + wp +"_"+ typ) h_efficiency.Write(trigger +"_hEfficiency_" + wp +"_"+ typ) - + hPtNum[ipath][index][WPind].Write(trigger +"_pass_" + wp +"_"+ typ) + hPtDen[ipath][index][WPind].Write(trigger +"_total_" + wp +"_"+ typ) + # Set the title of the histograms/graphs and their axes g_efficiency.SetTitle(trigger +"Path_" + wp +"_"+ typ) g_efficiency.GetYaxis().SetTitle("Efficiency") @@ -342,16 +349,17 @@ h_efficiency.SetTitle(trigger +"Path_" + wp +"_"+ typ) h_efficiency.GetYaxis().SetTitle("Efficiency") h_efficiency.GetXaxis().SetTitle("Offline p_{T}^{#tau} [GeV]") - + # per DM efficiencies for idm, DM in enumerate(tauDMs): g_efficiencyDM = TGraphAsymmErrors() g_efficiencyDM.BayesDivide(hPtNumDM[ipath][index][WPind][idm],hPtDenDM[ipath][index][WPind][idm]) - funct2 = functions(g_efficiencyDM, trigger + "_Efficiency" + wp +"_"+ typ + "_" + DM, idm, 0 ,0, 0, 0, 0, 0) + funct2 = functions(g_efficiencyDM, trigger + "_Efficiency" + wp +"_"+ typ + "_" + DM, idm, 0 ,0, 0, 0, 0, 0) h_efficiencyDM = funct2.getTH1FfromTGraphAsymmErrors() g_efficiencyDM.Write(trigger +"_gEfficiency_" + wp +"_" + typ + "_" + DM) h_efficiencyDM.Write(trigger +"_hEfficiency_" + wp +"_" + typ + "_" + DM) + hPtNumDM[ipath][index][WPind][idm].Write(trigger +"_pass_" + wp +"_" + typ + "_" + DM) + hPtDenDM[ipath][index][WPind][idm].Write(trigger +"_total_" + wp +"_" + typ + "_" + DM) file.Close() print "The output ROOT file has been created: ../data/" + outputname - diff --git a/processNtuples_stage2.py b/processNtuples_stage2.py new file mode 100644 index 00000000000..b29ad7f0ba2 --- /dev/null +++ b/processNtuples_stage2.py @@ -0,0 +1,168 @@ +import os, subprocess, sys + +Area = "/hdfs/local/ram/run2_ntuples" + +Eras = ["2016", "2017", "2018"] +Sidebands = ["signal", "w_enriched", "OS_low_mT", "SS_low_mT", "OS_high_mT", "SS_high_mT"] +Types = ["data", "ztt_mc", "zmm_mc", "w_mc", "ttbar_mc"] +Input_File_Label = ["SingleMuon", "DYJetsToLL", "WJetsToLNu", "TTTo2L2Nu", "TTToSemiLeptonic"] +Output_File_Label = ["DATA", "DY", "W", "TT_2L", "TT_SL"] + +Lumiscale = { ## Defined as (x-sec * Integ.Lumi)/Nevt + "2016": { + "DATA": 1.0, + "DY": 1.49, + "W": 25.41, + "TT_2L": 0.047, + "TT_SL": 0.122 + }, + "2017": { + "DATA": 1.0, + "DY": 2.58, + "W": 56.89, + "TT_2L": 0.053, 
+ "TT_SL": 0.13 + }, + "2018": { + "DATA": 1.0, + "DY": 3.66, + "W": 51.71, + "TT_2L": 0.082, + "TT_SL": 0.214 + } +} + + + +Lumiscale_num = { ## Defined as (x-sec * Integ.Lumi) + "2016": { + "DATA": 1.0, + "DY": 218172198.0, + "W": 2208808530.0, + "TT_2L": 3173560.0, + "TT_SL": 13122168.0 + }, + "2017": { + "DATA": 1.0, + "DY": 252204630.0, + "W": 2553358050.0, + "TT_2L": 3668600.0, + "TT_SL": 15169080.0 + }, + "2018": { + "DATA": 1.0, + "DY": 362810034.0, + "W": 3673143990.0, + "TT_2L": 5277480.0, + "TT_SL": 21821544.0 + } +} + +N_total = { ## Total number of unweighted MC events + "2016": { + "DATA": 1.0, + "DY": 146280395.0, + "W": 86916455.0, + "TT_2L": 67926800.0, + "TT_SL": 107604800.0 + }, + "2017": { + "DATA": 1.0, + "DY": 97800939.0, + "W": 44881137.0, + "TT_2L": 69155808.0, + "TT_SL": 110014744.0 + }, + "2018": { + "DATA": 1.0, + "DY": 99100315.0, + "W": 71026861.0, + "TT_2L": 64310000.0, + "TT_SL": 101550000.0 + } +} + + + + + +def run_cmd(command): + print "executing command = '%s'" % command + p = subprocess.Popen(command, shell = True, stdout = subprocess.PIPE, stderr = subprocess.PIPE) + stdout, stderr = p.communicate() + return stdout + +print("Creating directories to store the skimmed Ntuples (if they are not already created)") +run_cmd('mkdir -p tuples') +run_cmd('mkdir -p tuples/skimmed-2016') +run_cmd('mkdir -p tuples/skimmed-2017') +run_cmd('mkdir -p tuples/skimmed-2018') +print("Clearing up old stuff (if present)") +run_cmd('rm tuples/skimmed-201*/*') + + +for era in Eras: + for sample in Types: + for sideband in Sidebands: + print("Era: {}, Type: {}, Sideband: {}".format(era, sample, sideband)) + if(sample == "data"): + ID = 0 + print("Running skimTuple_NEW.py for ID: {}, Input_File_Label[ID]: {}, Output_File_Label[ID]: {}".format(str(ID), Input_File_Label[ID], Output_File_Label[ID])) + run_cmd("python TauTagAndProbe/python/skimTuple.py --input {}/full/{}/{}* --config TauTagAndProbe/data/{}/triggers.json --selection {} --output tuples/skimmed-{}/{}-{}-{}.root --type {} --sideband {} --lumiScale {}".format( Area, era, Input_File_Label[ID], era, "DeepTau", era, Output_File_Label[ID], sample, sideband, sample, sideband, str(Lumiscale[era][Output_File_Label[ID]]) )) + elif((sample == "ztt_mc") or (sample == "zmm_mc")): + ID = 1 + print("Running skimTuple_NEW.py for ID: {}, Input_File_Label[ID]: {}, Output_File_Label[ID]: {}".format(str(ID), Input_File_Label[ID], Output_File_Label[ID])) + run_cmd("python TauTagAndProbe/python/skimTuple.py --input {}/full/{}/{}* --config TauTagAndProbe/data/{}/triggers.json --selection {} --output tuples/skimmed-{}/{}-{}-{}.root --type {} --pu {}/Pileup_Data{}.root --sideband {} --lumiScale {}".format( Area, era, Input_File_Label[ID], era, "DeepTau", era, Output_File_Label[ID], sample, sideband, sample, Area, era, sideband, str(Lumiscale[era][Output_File_Label[ID]]) )) + elif(sample == "w_mc"): + ID = 2 + print("Running skimTuple_NEW.py for ID: {}, Input_File_Label[ID]: {}, Output_File_Label[ID]: {}".format(str(ID), Input_File_Label[ID], Output_File_Label[ID])) + run_cmd("python TauTagAndProbe/python/skimTuple.py --input {}/full/{}/{}* --config TauTagAndProbe/data/{}/triggers.json --selection {} --output tuples/skimmed-{}/{}-{}-{}.root --type {} --pu {}/Pileup_Data{}.root --sideband {} --lumiScale {}".format( Area, era, Input_File_Label[ID], era, "DeepTau", era, Output_File_Label[ID], sample, sideband, sample, Area, era, sideband, str(Lumiscale[era][Output_File_Label[ID]]) )) + elif(sample == "ttbar_mc"): + ID = 3 + print("Running 
skimTuple_NEW.py for ID: {}, Input_File_Label[ID]: {}, Output_File_Label[ID]: {}".format(str(ID), Input_File_Label[ID], Output_File_Label[ID])) + run_cmd("python TauTagAndProbe/python/skimTuple.py --input {}/full/{}/{}* --config TauTagAndProbe/data/{}/triggers.json --selection {} --output tuples/skimmed-{}/{}-{}-{}.root --type {} --pu {}/Pileup_Data{}.root --sideband {} --lumiScale {}".format( Area, era, Input_File_Label[ID], era, "DeepTau", era, Output_File_Label[ID], sample, sideband, sample, Area, era, sideband, str(Lumiscale[era][Output_File_Label[ID]]) )) + ID = 4 + print("Running skimTuple_NEW.py for ID: {}, Input_File_Label[ID]: {}, Output_File_Label[ID]: {}".format(str(ID), Input_File_Label[ID], Output_File_Label[ID])) + run_cmd("python TauTagAndProbe/python/skimTuple.py --input {}/full/{}/{}* --config TauTagAndProbe/data/{}/triggers.json --selection {} --output tuples/skimmed-{}/{}-{}-{}.root --type {} --pu {}/Pileup_Data{}.root --sideband {} --lumiScale {}".format( Area, era, Input_File_Label[ID], era, "DeepTau", era, Output_File_Label[ID], sample, sideband, sample, Area, era, sideband, str(Lumiscale[era][Output_File_Label[ID]]) )) + + print("Adding all root files for era {}".format(era)) + run_cmd("hadd -f input_stage2p5_{}.root tuples/skimmed-{}/*.root".format(era, era)) + + + print("Creating input files for CreateTrunOn.py script for era {} and mode {}".format(era, "subtract-from-data")) + run_cmd("python TauTagAndProbe/python/estimateBackgrounds.py --input input_stage2p5_{}.root --mode {} --output-data estimateBackgrounds_{}_DATA_mode_{} --output-dy-mc estimateBackgrounds_{}_DY_MC_mode_{} --output-w-enriched w_enriched_{} --output-signal signal_{}".format(era, "subtract-from-data", era, "subtract-from-data", era, "subtract-from-data", era, era)) + + print("Creating input files for CreateTrunOn.py script for era {} and mode {}".format(era, "add-to-dy-mc")) + run_cmd("python TauTagAndProbe/python/estimateBackgrounds.py --input input_stage2p5_{}.root --mode {} --output-data estimateBackgrounds_{}_DATA_mode_{} --output-dy-mc estimateBackgrounds_{}_DY_MC_mode_{}".format(era, "add-to-dy-mc", era, "add-to-dy-mc", era, "add-to-dy-mc")) + + print("Running CreateTrunOn.py script for era {} and {} region".format(era, "signal")) + run_cmd("python TauTagAndProbe/python/createTrunOn.py --input-data signal_{}_data.root --input-dy-mc signal_{}_ztt_mc.root --output turn_on_{}_signal --branchname-weight-data {} --branchname-weight-dy-mc {}".format(era, era, era, "weight", "weight")) + + print("Running CreateTrunOn.py script for era {} and {} region".format(era, "w-enriched")) + run_cmd("python TauTagAndProbe/python/createTrunOn.py --input-data w_enriched_{}_data.root --input-dy-mc w_enriched_{}_mc.root --output turn_on_{}_w_enriched --branchname-weight-data {} --branchname-weight-dy-mc {}".format(era, era, era, "weight", "weight")) + + print("Running CreateTrunOn.py script for era {} and mode {}".format(era, "subtract-from-data")) + run_cmd("python TauTagAndProbe/python/createTrunOn.py --input-data estimateBackgrounds_{}_DATA_mode_{}.root --input-dy-mc estimateBackgrounds_{}_DY_MC_mode_{}.root --output turn_on_{}_{}_LATEST --branchname-weight-data {} --branchname-weight-dy-mc {} ".format(era, "subtract-from-data", era, "subtract-from-data", era, "subtract-from-data", "final_weight", "weight")) + + print("Running CreateTrunOn.py script for era {} and mode {}".format(era, "add-to-dy-mc")) + run_cmd("python TauTagAndProbe/python/createTrunOn.py --input-data 
estimateBackgrounds_{}_DATA_mode_{}.root --input-dy-mc estimateBackgrounds_{}_DY_MC_mode_{}.root --output turn_on_{}_{}_LATEST --branchname-weight-data {} --branchname-weight-dy-mc {} ".format(era, "add-to-dy-mc", era, "add-to-dy-mc", era, "add-to-dy-mc", "weight", "final_weight")) + + print("Running fitTurnOn.py script for era {} and {} region".format(era, "signal")) + run_cmd("python TauTagAndProbe/python/fitTurnOn.py --input turn_on_{}_{}.root --output turn_on_{}_{}_fitted --mode {}".format(era, "signal", era, "signal", "fixed")) + + print("Running fitTurnOn.py script for era {} and {} region".format(era, "w_enriched")) + run_cmd("python TauTagAndProbe/python/fitTurnOn.py --input turn_on_{}_{}.root --output turn_on_{}_{}_fitted --mode {}".format(era, "w_enriched", era, "w_enriched", "fixed")) + + print("Running fitTurnOn.py script for era {} and mode {}".format(era, "subtract-from-data")) + run_cmd("python TauTagAndProbe/python/fitTurnOn.py --input turn_on_{}_{}_LATEST.root --output turn_on_{}_{}_fitted_LATEST --mode {}".format(era, "subtract-from-data", era, "subtract-from-data", "adaptive")) + + print("Running fitTurnOn.py script for era {} and mode {}".format(era, "add-to-dy-mc")) + run_cmd("python TauTagAndProbe/python/fitTurnOn.py --input turn_on_{}_{}_LATEST.root --output turn_on_{}_{}_fitted_LATEST --mode {}".format(era, "add-to-dy-mc", era, "add-to-dy-mc", "adaptive")) + + print("Running computeTriggerSFs.py script for era {}".format(era)) + run_cmd("python TauTagAndProbe/python/computeTriggerSFs.py --input_stage2p5 input_stage2p5_{}.root --input_signal turn_on_{}_signal_fitted.root --input_w_enriched turn_on_{}_w_enriched_fitted.root --output NewTriggerSFs_{}".format(era, era, era, era)) + + print("Making directory {} to store the final plots and SF root files".format("Tau_Trigger_sf_plots")) + run_cmd("mkdir -p $PWD/{}".format("Tau_Trigger_sf_plots")) + print("Running TriggerSF_plotter.py script for era {}".format(era)) + run_cmd("python TauTagAndProbe/python/TriggerSF_plotter.py --era {} --inputFilePath-new $PWD --outputFilePath $PWD/{}".format(era, "Tau_Trigger_sf_plots"))
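
Note on the hard-coded Lumiscale weights in the new processNtuples_stage2.py: each entry is documented as (x-sec * Integ.Lumi)/Nevt, i.e. the ratio of the corresponding Lumiscale_num and N_total entries. Below is a minimal standalone cross-check (not part of the patch; the numbers are copied from the 2016 dictionaries in the script) that recomputes the weights from the two auxiliary dictionaries:

# Sanity check: Lumiscale should equal Lumiscale_num / N_total
# (values copied from the 2016 block of processNtuples_stage2.py).
lumiscale_2016 = {"DY": 1.49, "W": 25.41, "TT_2L": 0.047, "TT_SL": 0.122}
lumiscale_num_2016 = {"DY": 218172198.0, "W": 2208808530.0,
                      "TT_2L": 3173560.0, "TT_SL": 13122168.0}
n_total_2016 = {"DY": 146280395.0, "W": 86916455.0,
                "TT_2L": 67926800.0, "TT_SL": 107604800.0}

for sample in sorted(lumiscale_2016):
    derived = lumiscale_num_2016[sample] / n_total_2016[sample]
    # print(...) with a single pre-formatted string works in both Python 2 and 3
    print("%-6s hard-coded = %6.3f  Lumiscale_num/N_total = %6.3f"
          % (sample, lumiscale_2016[sample], derived))

Running the same check on the 2017 and 2018 blocks reproduces every entry to the quoted precision except the 2017 TT_SL value, where the hard-coded 0.13 differs from the ratio 15169080.0/110014744.0 (about 0.138); that entry may be worth double-checking before the skimming step is rerun.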