From 104eb948a4947cbd64ef552e55e519847878f8e2 Mon Sep 17 00:00:00 2001 From: Eduardo Rodrigues Date: Thu, 10 Dec 2020 08:41:47 +0100 Subject: [PATCH] Release 0.10.0 (#115) * Depend on Particle 0.14 * Try to fix README display on PyPI * Bump version number * CHANGELOG update * Copy .pre-commit-config.yaml file from Particle, to please the CI * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * [CI skip] CHANGELOG update Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .github/workflows/wheel.yml | 2 +- .pre-commit-config.yaml | 32 ++ CHANGELOG.md | 11 +- README.md | 4 +- decaylanguage/__init__.py | 1 + decaylanguage/__main__.py | 6 +- decaylanguage/_version.py | 5 +- decaylanguage/data/DECAY_BELLE2.DEC | 428 +++++++++++----------- decaylanguage/data/__init__.py | 1 + decaylanguage/dec/__init__.py | 1 + decaylanguage/dec/dec.py | 319 ++++++++++------ decaylanguage/dec/decparser.py | 304 ++++++++------- decaylanguage/dec/enums.py | 5 +- decaylanguage/decay/__init__.py | 2 +- decaylanguage/decay/decay.py | 133 +++---- decaylanguage/decay/viewer.py | 104 ++++-- decaylanguage/modeling/__init__.py | 1 + decaylanguage/modeling/ampgen2goofit.py | 48 +-- decaylanguage/modeling/ampgentransform.py | 40 +- decaylanguage/modeling/amplitudechain.py | 153 ++++---- decaylanguage/modeling/decay.py | 40 +- decaylanguage/modeling/goofit.py | 303 +++++++++------ decaylanguage/utils/__init__.py | 1 + decaylanguage/utils/errors.py | 5 +- decaylanguage/utils/particleutils.py | 9 +- decaylanguage/utils/utilities.py | 27 +- docs/conf.py | 60 +-- docs/index.rst | 1 - docs/installation.rst | 2 - docs/requirements.txt | 2 +- models/DtoKpipipi_v2.txt | 424 ++++++++++----------- notebooks/DtoKpipipi_v2.cu | 10 +- notebooks/simple_model.txt | 12 +- setup.py | 102 +++--- tests/dec/test_dec.py | 360 +++++++++++------- tests/dec/test_issues.py | 3 +- tests/decay/test_decay.py | 250 ++++++++----- tests/decay/test_viewer.py | 74 ++-- tests/output/DtoKpipipi_v2.cu | 10 +- tests/test_convert.py | 10 +- tests/test_dec_full.py | 12 +- tests/test_decaylanguage.py | 1 + tests/test_goofit.py | 9 +- 43 files changed, 1914 insertions(+), 1413 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.github/workflows/wheel.yml b/.github/workflows/wheel.yml index d4920699..1c0470fa 100644 --- a/.github/workflows/wheel.yml +++ b/.github/workflows/wheel.yml @@ -23,7 +23,7 @@ jobs: - name: Build SDist run: python setup.py sdist - + - uses: actions/upload-artifact@v2 with: path: dist/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..763b80b4 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,32 @@ + +repos: +- repo: https://github.com/psf/black + rev: 20.8b1 + hooks: + - id: black +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.3.0 + hooks: + - id: check-added-large-files + args: ['--maxkb=1000'] + - id: mixed-line-ending + - id: trailing-whitespace + - id: check-merge-conflict + - id: check-case-conflict + - id: check-symlinks + - id: check-yaml + - id: requirements-txt-fixer + - id: debug-statements + - id: end-of-file-fixer + - id: fix-encoding-pragma +- repo: https://github.com/mgedmin/check-manifest + rev: "0.45" + hooks: + - id: check-manifest + stages: [manual] +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.790 + hooks: + - id: mypy + files: src + additional_dependencies: [attrs==19.3.0] diff --git a/CHANGELOG.md b/CHANGELOG.md index 
b331ae7a..e39c429a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,16 @@ # Changelog +## Version 0.10.0 (2020-12-10) + +* Dependencies: + - Package dependent on ``Particle`` version 0.14. +* Miscellaneous: + - Pre-commit hooks added - Black formatting, check-manifest, etc. + ## Version 0.9.1 (2020-11-04) * Parsing of decay files (aka .dec files): - - ``DecFileParser`` class enhanced to understand the CopyDecay statement. + - ``DecFileParser`` class enhanced to understand EvtGen's CopyDecay statement in decay files. * Tests: - Added tests for Python 3.8 and 3.9 on Windows. * Miscellaneous: @@ -50,7 +57,7 @@ * Universal representation of decay chains: - Classes ``DecayChain``, ``DecayMode``, ``DaughtersDict`` and ``DecayChainViewer`` enhanced. * Dependencies and Python version support: - - Package dependent on ``Particle`` versions 0.9.*. + - Package dependent on ``Particle`` versions 0.9. - Support for Python 3.8 added. diff --git a/README.md b/README.md index d592d931..e09d9949 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -[![DecayLanguage](https://raw.githubusercontent.com/scikit-hep/decaylanguage/master/images/DecayLanguage.png)](https://decaylanguage.readthedocs.io/en/latest/) +DecayLanguage logo # DecayLanguage: describe, manipulate and convert particle decays @@ -17,8 +17,6 @@ [![Binder demo](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/scikit-hep/decaylanguage/master?urlpath=lab/tree/notebooks/DecayLanguageDemo.ipynb) - - DecayLanguage implements a language to describe and convert particle decays between digital representations, effectively making it possible to interoperate several fitting programs. Particular interest is given to programs dedicated diff --git a/decaylanguage/__init__.py b/decaylanguage/__init__.py index 8524ae6d..08fb09be 100644 --- a/decaylanguage/__init__.py +++ b/decaylanguage/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE diff --git a/decaylanguage/__main__.py b/decaylanguage/__main__.py index eabb7c07..223a35d1 100755 --- a/decaylanguage/__main__.py +++ b/decaylanguage/__main__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# coding: utf-8 +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -15,10 +15,10 @@ class DecayLanguageDecay(cli.Application): - generator = cli.SwitchAttr(['-G', '--generator'], cli.Set('goofit'), mandatory=True) + generator = cli.SwitchAttr(["-G", "--generator"], cli.Set("goofit"), mandatory=True) def main(self, filename): - if self.generator == 'goofit': + if self.generator == "goofit": ampgen2goofit(filename) diff --git a/decaylanguage/_version.py b/decaylanguage/_version.py index 940a98d5..d6fe9bde 100644 --- a/decaylanguage/_version.py +++ b/decaylanguage/_version.py @@ -1,10 +1,11 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE # or https://github.com/scikit-hep/decaylanguage for details. 
-__version__ = '0.9.1' +__version__ = "0.10.0" version = __version__ -version_info = __version__.split('.') +version_info = __version__.split(".") diff --git a/decaylanguage/data/DECAY_BELLE2.DEC b/decaylanguage/data/DECAY_BELLE2.DEC index 95774758..f8889719 100644 --- a/decaylanguage/data/DECAY_BELLE2.DEC +++ b/decaylanguage/data/DECAY_BELLE2.DEC @@ -5,7 +5,7 @@ # # October 2016, Umberto Tamponi (tamponi@to.infn.it) # Light meson decays (rho, eta, phi, f'_0...) updated to PDG 2015 -# New Dalitz modes for eta and eta' +# New Dalitz modes for eta and eta' # # Updates listed below are tabulated on https://confluence.desy.de/display/BI/EvtGen+Generator+Task-Force # See individual pull requests for details. @@ -256,31 +256,31 @@ yesPhotos # # Use VSS_BMIX mixing decay model (DK,28-Oct-1999) Decay Upsilon(4S) -0.515122645 B+ B- VSS; -0.483122645 B0 anti-B0 VSS_BMIX dm; -0.000015583 e+ e- VLL; -0.000015766 mu+ mu- VLL; -0.000015766 tau+ tau- VLL; -0.000084099 Upsilon(2S) pi+ pi- VVPIPI; -0.000044342 Upsilon(2S) pi0 pi0 VVPIPI; -0.000080123 Upsilon pi+ pi- VVPIPI; -0.000044342 Upsilon pi0 pi0 VVPIPI; -0.000194392 Upsilon eta PARTWAVE 0.0 0.0 1.0 0.0 0.0 0.0; +0.515122645 B+ B- VSS; +0.483122645 B0 anti-B0 VSS_BMIX dm; +0.000015583 e+ e- VLL; +0.000015766 mu+ mu- VLL; +0.000015766 tau+ tau- VLL; +0.000084099 Upsilon(2S) pi+ pi- VVPIPI; +0.000044342 Upsilon(2S) pi0 pi0 VVPIPI; +0.000080123 Upsilon pi+ pi- VVPIPI; +0.000044342 Upsilon pi0 pi0 VVPIPI; +0.000194392 Upsilon eta PARTWAVE 0.0 0.0 1.0 0.0 0.0 0.0; # BF ~ (2J+1)E^3_gamma; see PRL 94, 032001 # V-> gamma S Partial wave (L,S)=(0,0) -0.000092625 gamma chi_b0(3P) HELAMP 1. 0. 1. 0.; +0.000092625 gamma chi_b0(3P) HELAMP 1. 0. 1. 0.; # V-> gamma V Partial wave (L,S)=(0,1) -0.000138938 gamma chi_b1(3P) HELAMP 1. 0. 1. 0. -1. 0. -1. 0.; +0.000138938 gamma chi_b1(3P) HELAMP 1. 0. 1. 0. -1. 0. -1. 0.; # V-> gamma T Partial wave (L,S)=(0,1) -0.000129084 gamma chi_b2(3P) HELAMP 2.4494897 0. 1.7320508 0. 1. 0. 1. 0. 1.7320508 0. 2.4494897 0.; +0.000129084 gamma chi_b2(3P) HELAMP 2.4494897 0. 1.7320508 0. 1. 0. 1. 0. 1.7320508 0. 2.4494897 0.; # V-> gamma S Partial wave (L,S)=(0,0) -0.000002956 gamma chi_b0(2P) HELAMP 1. 0. 1. 0.; +0.000002956 gamma chi_b0(2P) HELAMP 1. 0. 1. 0.; # V-> gamma V Partial wave (L,S)=(0,1) -0.000007883 gamma chi_b1(2P) HELAMP 1. 0. 1. 0. -1. 0. -1. 0.; +0.000007883 gamma chi_b1(2P) HELAMP 1. 0. 1. 0. -1. 0. -1. 0.; # V-> gamma T Partial wave (L,S)=(0,1) -0.000011825 gamma chi_b2(2P) HELAMP 2.4494897 0. 1.7320508 0. 1. 0. 1. 0. 1.7320508 0. 2.4494897 0.; -0.000837571 g g g PYTHIA 4; -0.000039415 gamma g g PYTHIA 4; +0.000011825 gamma chi_b2(2P) HELAMP 2.4494897 0. 1.7320508 0. 1. 0. 1. 0. 1.7320508 0. 
2.4494897 0.; +0.000837571 g g g PYTHIA 4; +0.000039415 gamma g g PYTHIA 4; Enddecay @@ -1758,8 +1758,8 @@ Decay anti-B0 0.0000017 anti-K''*0 gamma SVP_HELAMP 1.0 0.0 1.0 0.0; 0.0002735 anti-Xsd gamma BTOXSGAMMA 2 ; #[Reconstructed PDG2016] BF = 3.49e-4 - Sum of exclusive b -> s gamma modes -0.000000160 anti-K0 e+ e- BTOSLLBALL; -0.000001030 anti-K*0 e+ e- BTOSLLBALL; +0.000000160 anti-K0 e+ e- BTOSLLBALL; +0.000001030 anti-K*0 e+ e- BTOSLLBALL; 0.00000551 anti-Xsd e+ e- BTOXSLL 4.8 0.2 0.0 0.41; #[Reconstructed PDG2016] BF = 6.7e-6 - Sum of exclusive b -> s e e modes 0.000000021 pi+ pi- e+ e- PHSP; #[Reconstructed PDG2016] #By lepton universality from mu mu BF 0.000000339 anti-K0 mu+ mu- BTOSLLBALL; #[Reconstructed PDG2016] @@ -4151,43 +4151,43 @@ Decay B- 0.000026 pi- pi0 pi0 CB3PI-P00 alpha; 0.000000 pi- pi- pi+ CB3PI-MPP alpha; -0.000004070 eta pi- PHSP; -0.000002330 eta K- PHSP; -0.000019300 K*- eta SVS; -0.000007000 rho- eta SVS; -0.000002700 eta' pi- PHSP; -0.000070600 eta' K- PHSP; -0.000004900 K*- eta' SVS; -0.000008700 rho- eta' SVS; -0.000006900 omega pi- SVS; -0.000006700 omega K- SVS; +0.000004070 eta pi- PHSP; +0.000002330 eta K- PHSP; +0.000019300 K*- eta SVS; +0.000007000 rho- eta SVS; +0.000002700 eta' pi- PHSP; +0.000070600 eta' K- PHSP; +0.000004900 K*- eta' SVS; +0.000008700 rho- eta' SVS; +0.000006900 omega pi- SVS; +0.000006700 omega K- SVS; 0.0000010 omega K*- SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; -0.000015900 omega rho- SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; +0.000015900 omega rho- SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; 0.0000000 phi pi- SVS; -0.000008300 phi K- SVS; +0.000008300 phi K- SVS; #2-body modes from Jim Olsen # (K_S0 K- and K_L0 K- -> 1x10^-6, Jim Olsen - Mar 27, 2001) # (pi- pi0 -> 5x10^-6 and K- pi0 -> 11x10^-6, Jim Olsen - Mar 27, 2001) # PR LHCb 04/08/2004 Split into Ks/KL 0.000000 anti-K0 pi- PHSP; -0.000011550 K_S0 pi- PHSP; -0.000011550 K_L0 pi- PHSP; -0.000001360 K0 K- PHSP; -0.000005700 pi- pi0 PHSP; -0.000012900 K- pi0 PHSP; +0.000011550 K_S0 pi- PHSP; +0.000011550 K_L0 pi- PHSP; +0.000001360 K0 K- PHSP; +0.000005700 pi- pi0 PHSP; +0.000012900 K- pi0 PHSP; # 3-body John Back (jback@slac.stanford.edu) - Oct 15, 2002 # JGS intersperses modes with pi0->eta,eta' # B- modes # rho0 3-body modes -0.000008300 rho0 pi- SVS; -0.000003700 rho0 K- SVS; +0.000008300 rho0 pi- SVS; +0.000003700 rho0 K- SVS; # rho- 3-body modes -0.000008000 rho- anti-K0 SVS; -0.000010900 rho- pi0 SVS; +0.000008000 rho- anti-K0 SVS; +0.000010900 rho- pi0 SVS; # rho(1450) 3-body modes 0.0000022 rho(2S)0 pi- SVS; @@ -4201,16 +4201,16 @@ Decay B- 0.000001 a_00 pi- PHSP; 0.000001 a_0- pi0 PHSP; -0.0000016 f_2 pi- PHSP; +0.0000016 f_2 pi- PHSP; # K*(1430) 3-body modes -0.000045 anti-K_0*0 pi- PHSP; +0.000045 anti-K_0*0 pi- PHSP; 0.000001 K_0*0 K- PHSP; 0.000002 K_0*- pi0 PHSP; 0.000002 K_0*- K0 PHSP; # K*-(892) 3-body modes -0.000006900 K*- pi0 SVS; +0.000006900 K*- pi0 SVS; 0.0000030 K*- K0 SVS; # Non-resonant 3-body left-overs @@ -4220,13 +4220,13 @@ Decay B- #0402270.000002 pi- pi+ pi- PHSP; # K- pi+ pi-: high mass + f0(400-1200) = (5+5)e-6 -0.000000000 K- pi+ pi- PHSP; +0.000000000 K- pi+ pi- PHSP; # K- K+ pi-: just non-resonant -0.000005000 K- K+ pi- PHSP; +0.000005000 K- K+ pi- PHSP; # K- K+ K-: high mass structure near 1500 + non-res: total - phiK = 30 - 4 -0.000025400 K- K+ K- PHSP; +0.000025400 K- K+ K- PHSP; # K-K-pi+: suppressed mode (1e-7) 0.0000001 K- K- pi+ PHSP; @@ -4267,7 +4267,7 @@ Decay B- #4-body modes from Andrei Gritsan #--- 4-body rho-rho, rho-pi-pi, pi-pi-pi-pi 
-------------------------- # PR LHCb 22 Apr 2004 Set long. pol. for rho rho -0.000024000 rho- rho0 SVV_HELAMP 0.0 0.0 1.0 0.0 0.0 0.0; +0.000024000 rho- rho0 SVV_HELAMP 0.0 0.0 1.0 0.0 0.0 0.0; 0.000010 rho0 pi- pi0 PHSP; 0.000005 rho0 pi- eta PHSP; 0.000002 rho0 pi- eta' PHSP; @@ -4289,8 +4289,8 @@ Decay B- 0.000002 pi- eta eta pi0 PHSP; 0.000002 pi- eta' pi0 pi0 PHSP; 0.000001 pi- eta' eta pi0 PHSP; -0.000020000 a_10 pi- SVS; -0.000026000 a_1- pi0 SVS; +0.000020000 a_10 pi- SVS; +0.000026000 a_1- pi0 SVS; 0.0000067 b_10 pi- SVS; 0.000010 b_1- pi0 SVS; 0.000010 rho- f_0 SVS; @@ -4302,11 +4302,11 @@ Decay B- 0.000001 a_0+ pi- pi- PHSP; #--- 4-body rho-K*, rho-pi-K, K*-pi-pi, pi-pi-pi-K ------------------- -0.000009200 rho- anti-K*0 SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; +0.000009200 rho- anti-K*0 SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; 0.000010 rho0 K*- SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; 0.000010 pi- pi0 anti-K*0 PHSP; 0.000005 pi- eta anti-K*0 PHSP; -0.000069800 pi+ pi- K*- PHSP; +0.000069800 pi+ pi- K*- PHSP; 0.000010 pi0 pi0 K*- PHSP; 0.000005 pi0 eta K*- PHSP; 0.000002 eta eta K*- PHSP; @@ -4340,9 +4340,9 @@ Decay B- 0.000006 rho0 K_0*- PHSP; 0.000010 pi- pi0 anti-K_0*0 PHSP; 0.000010 pi0 pi0 K_0*- PHSP; -0.000005200 K*- f_0 SVS; +0.000005200 K*- f_0 SVS; 0.000010 a_10 K- SVS; -0.000035000 a_1- anti-K0 SVS; +0.000035000 a_1- anti-K0 SVS; 0.0000091 b_10 K- SVS; 0.000010 b_1- anti-K0 SVS; 0.000005 K*- a_00 SVS; @@ -4382,12 +4382,12 @@ Decay B- 0.0000002 K*- K0 eta' PHSP; #--- 4-body phi-K*, phi-K-pi, K-K-K*, pi-K-K-K ----------------------- -0.000010000 phi K*- SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; +0.000010000 phi K*- SVV_HELAMP 1.0 0.0 1.7 0.0 1.0 0.0; 0.000010 phi anti-K0 pi- PHSP; 0.000010 phi K- pi0 PHSP; 0.000005 phi K- eta PHSP; 0.000002 phi K- eta' PHSP; -0.000026000 K- K+ K*- PHSP; +0.000026000 K- K+ K*- PHSP; 0.000010 K- K*+ K- PHSP; 0.000010 K- K*0 anti-K0 PHSP; 0.000010 K- K0 anti-K*0 PHSP; @@ -4400,7 +4400,7 @@ Decay B- 0.000005 K0 anti-K0 K- eta PHSP; 0.000002 K0 anti-K0 K- eta' PHSP; 0.000010 K0 anti-K0 anti-K0 pi- PHSP; -0.000007000 phi K_0*- PHSP; +0.000007000 phi K_0*- PHSP; 0.000010 K- K+ K_0*- PHSP; 0.000010 K- K- K_0*+ PHSP; 0.000010 K- anti-K_0*0 K0 PHSP; @@ -4699,7 +4699,7 @@ Decay B- 0.000002 K- K'_10 anti-K0 eta' PHSP; #--- 5-body phi-phi-K, phi-phi-pi, phi-3K, phi-K-K-pi, 5K, 4K-pi ------------ -0.000004900 phi phi K- PHSP; +0.000004900 phi phi K- PHSP; 0.000001 phi phi pi- PHSP; 0.000001 phi K+ K- K- PHSP; 0.000001 phi K0 anti-K0 K- PHSP; @@ -4725,9 +4725,9 @@ Decay B- # B -> cc= s sum = 1.92% # 0.001026 J/psi K- SVS; #[Reconstructed PDG2018] -0.001430 J/psi K*- SVV_HELAMP PKHminus PKphHminus PKHzero PKphHzero PKHplus PKphHplus; +0.001430 J/psi K*- SVV_HELAMP PKHminus PKphHminus PKHzero PKphHzero PKHplus PKphHplus; 0.000041 J/psi pi- SVS; #[Reconstructed PDG2018] -0.000050 J/psi rho- SVV_HELAMP PKHminus PKphHminus PKHzero PKphHzero PKHplus PKphHplus; +0.000050 J/psi rho- SVV_HELAMP PKHminus PKphHminus PKHzero PKphHzero PKHplus PKphHplus; 0.00113 J/psi anti-K0 pi- PHSP; 0.0001 J/psi K- pi0 PHSP; 0.0001 J/psi K'_1- SVV_HELAMP 0.5 0.0 1.0 0.0 0.5 0.0; @@ -4757,7 +4757,7 @@ Decay B- 0.0000040 psi(2S) phi K- PHSP; #Added by Yubo Li 03/09/2018 0.000960 eta_c K- PHSP; #[Reconstructed PDG2018] -0.001000 K*- eta_c SVS; +0.001000 K*- eta_c SVS; 0.0002 eta_c anti-K0 pi- PHSP; 0.000062 eta_c K- pi0 PHSP; #set as UL as PDG 0.0002 eta_c K- pi+ pi- PHSP; @@ -4797,7 +4797,7 @@ Decay B- 0.000100 chi_c2 K- pi0 pi0 PHSP; 0.000100 chi_c2 anti-K0 pi- pi0 PHSP; -0.00049 psi(3770) K- SVS; 
+0.00049 psi(3770) K- SVS; 0.0005 psi(3770) K*- PHSP; 0.0003 psi(3770) anti-K0 pi- PHSP; 0.0002 psi(3770) K- pi0 PHSP; @@ -4813,10 +4813,10 @@ Decay B- # b -> c (sc=) -> D Ds X Sum = 10% # -0.0100 D0 D_s- PHSP; -0.0082 D*0 D_s- SVS; -0.0076 D_s*- D0 SVS; -0.0171 D_s*- D*0 SVV_HELAMP 0.48 0.0 0.734 0.0 0.48 0.0; +0.0100 D0 D_s- PHSP; +0.0082 D*0 D_s- SVS; +0.0076 D_s*- D0 SVS; +0.0171 D_s*- D*0 SVV_HELAMP 0.48 0.0 0.734 0.0 0.48 0.0; 0.0006 D'_10 D_s- SVS; 0.0012 D'_10 D_s*- SVV_HELAMP 0.48 0.0 0.734 0.0 0.48 0.0; 0.0012 D_10 D_s- SVS; @@ -4845,16 +4845,16 @@ Decay B- 0.0017 D0 D- anti-K0 PHSP; 0.0052 D0 D*- anti-K0 PHSP; 0.0031 D*0 D- anti-K0 PHSP; -0.007800000 D*0 D*- anti-K0 PHSP; +0.007800000 D*0 D*- anti-K0 PHSP; # External+internal W-emission amplitude -0.002100000 D0 anti-D0 K- PHSP; +0.002100000 D0 anti-D0 K- PHSP; 0.0018 D*0 anti-D0 K- PHSP; -0.004700000 D0 anti-D*0 K- PHSP; -0.005300000 D*0 anti-D*0 K- PHSP; +0.004700000 D0 anti-D*0 K- PHSP; +0.005300000 D*0 anti-D*0 K- PHSP; # Internal W-emission amplitude (color suppressed modes) 0.0005 D- D+ K- PHSP; 0.0005 D*- D+ K- PHSP; -0.001500000 D- D*+ K- PHSP; +0.001500000 D- D*+ K- PHSP; 0.0015 D*- D*+ K- PHSP; 0.0025 D0 D- anti-K*0 PHSP; @@ -4873,42 +4873,42 @@ Decay B- 0.0010 D*- D*+ K*- PHSP; # B->D(*)D(*). See Ref [B1]: -0.000380000 D- D0 PHSP; -0.000390000 D*- D0 SVS; -0.000630000 D*0 D- SVS; -0.000810000 D*0 D*- SVV_HELAMP 0.47 0.0 0.96 0.0 0.56 0.0; +0.000380000 D- D0 PHSP; +0.000390000 D*- D0 SVS; +0.000630000 D*0 D- SVS; +0.000810000 D*0 D*- SVV_HELAMP 0.47 0.0 0.96 0.0 0.56 0.0; # B -> D(*) X Exclusive Modes -0.005190000 D*0 pi- SVS; -0.004840000 D0 pi- PHSP; -0.013400000 rho- D0 SVS; +0.005190000 D*0 pi- SVS; +0.004840000 D0 pi- PHSP; +0.013400000 rho- D0 SVS; # D* rho HELAMP parameters taken from ICHEP 98-852. -0.009800000 D*0 rho- SVV_HELAMP 0.228 0.95 0.932 0.0 0.283 1.13; +0.009800000 D*0 rho- SVV_HELAMP 0.228 0.95 0.932 0.0 0.283 1.13; 0.0005 D0 pi0 pi- PHSP; 0.0005 D*0 pi0 pi- PHSP; -0.001070000 D+ pi- pi- PHSP; -0.001350000 D*+ pi- pi- PHSP; +0.001070000 D+ pi- pi- PHSP; +0.001350000 D*+ pi- pi- PHSP; # D a1 updated Ref. [B1]: -0.004000000 a_1- D0 SVS; -0.000200000 D0 rho0 pi- PHSP; -0.006800000 D0 pi+ pi- pi- PHSP; +0.004000000 a_1- D0 SVS; +0.000200000 D0 rho0 pi- PHSP; +0.006800000 D0 pi+ pi- pi- PHSP; # SVV_HELAMP from factorization, recommendation # http://babar-hn.slac.stanford.edu:5090/HyperNews/get/event_gen/168.html: # updated Ref. 
[B1]: # October 26, 2004 Lange update -0.019000000 D*0 a_1- SVV_HELAMP 0.200 0.0 0.866 0.0 0.458 0.0; +0.019000000 D*0 a_1- SVV_HELAMP 0.200 0.0 0.866 0.0 0.458 0.0; 0.00042 D*0 rho0 pi- PHSP; -0.01030 D*0 pi+ pi- pi- PHSP; +0.01030 D*0 pi+ pi- pi- PHSP; 0.0020 D+ rho- pi- PHSP; 0.0020 D+ pi0 pi- pi- PHSP; 0.0020 D*+ rho- pi- PHSP; -0.0150 D*+ pi0 pi- pi- PHSP; +0.0150 D*+ pi0 pi- pi- PHSP; 0.0005 D*0 rho- pi0 PHSP; 0.0005 D*0 pi- pi0 pi0 PHSP; @@ -4927,93 +4927,93 @@ Decay B- # B->DK, recommendation # http://babar-hn.slac.stanford.edu:5090/HyperNews/get/event_gen/151.html: # update: Ref [B1]: -0.0003680 D0 K- PHSP; -0.0004210 D*0 K- SVS; -0.0005300 K*- D0 SVS; -0.0008100 D*0 K*- SVV_HELAMP 0.228 0.0 0.932 0.0 0.283 0.0; +0.0003680 D0 K- PHSP; +0.0004210 D*0 K- SVS; +0.0005300 K*- D0 SVS; +0.0008100 D*0 K*- SVV_HELAMP 0.228 0.0 0.932 0.0 0.283 0.0; 0.0000005 D- pi0 PHSP; 0.0000005 D*- pi0 SVS; 0.000011 D- anti-K0 PHSP; 0.000006 D*- anti-K0 SVS; 0.00075 D0 D_s0*- PHSP; 0.0009 D*0 D_s0*- SVS; -0.0031000 D_s1- D0 SVS; -0.0120000 D*0 D_s1- SVV_HELAMP 0.4904 0.0 0.7204 0.0 0.4904 0.0; +0.0031000 D_s1- D0 SVS; +0.0120000 D*0 D_s1- SVV_HELAMP 0.4904 0.0 0.7204 0.0 0.4904 0.0; 0.00055 D0 K- anti-K0 PHSP; 0.00075 D0 K- anti-K*0 PHSP; -0.00150 D*0 K- anti-K*0 PHSP; -0.00275 D0 omega pi- PHSP; -0.00450 D*0 omega pi- PHSP; +0.00150 D*0 K- anti-K*0 PHSP; +0.00275 D0 omega pi- PHSP; +0.00450 D*0 omega pi- PHSP; 0.00045 D0 D'_s1- PHSP; 0.00094 D*0 D'_s1- PHSP; # Lam_c X / Sigma_c X 4.0 % -0.032587684 cd_1 anti-uu_1 PYTHIA 23; +0.032587684 cd_1 anti-uu_1 PYTHIA 23; # Xi_c X 2.5% -0.008887593 cs_1 anti-uu_1 PYTHIA 23; +0.008887593 cs_1 anti-uu_1 PYTHIA 23; -0.222451667 anti-u d c anti-u PYTHIA 48; -0.042462776 anti-u d c anti-u PYTHIA 13; -0.022218888 anti-u s c anti-u PYTHIA 13; +0.222451667 anti-u d c anti-u PYTHIA 48; +0.042462776 anti-u d c anti-u PYTHIA 13; +0.022218888 anti-u s c anti-u PYTHIA 13; #lange - try to crank up the psi production.... 
-0.073075460 anti-c s c anti-u PYTHIA 13; -0.003950000 anti-c d c anti-u PYTHIA 13; -0.002962500 anti-u d u anti-u PYTHIA 48; -0.003950000 anti-c s u anti-u PYTHIA 48; +0.073075460 anti-c s c anti-u PYTHIA 13; +0.003950000 anti-c d c anti-u PYTHIA 13; +0.002962500 anti-u d u anti-u PYTHIA 48; +0.003950000 anti-c s u anti-u PYTHIA 48; # JGS 11/5/02 This and similar a few lines above have been divided by two # to solve a double-counting problem for this channel -0.002024389 anti-u u d anti-u PYTHIA 48; -0.000069145 anti-d d d anti-u PYTHIA 48; -0.000088900 anti-s s d anti-u PYTHIA 48; -0.002172556 anti-u u s anti-u PYTHIA 48; -0.001777537 anti-d d s anti-u PYTHIA 48; -0.001481296 anti-s s s anti-u PYTHIA 48; -0.004937500 s anti-u PYTHIA 32; - -0.000550000 D0 K- K0 PHSP; -0.000750000 D0 K- K*0 PHSP; -0.018000000 D*0 pi+ pi- pi- pi0 PHSP; -0.005700000 D*0 pi- pi- pi- pi+ pi+ PHSP; -0.002600000 D*+ pi- pi- pi- pi+ PHSP; -0.000180000 D_s+ pi- K- PHSP; -0.000145000 D_s*+ pi- K- PHSP; -0.000011000 D_s+ K- K- PHSP; -0.000018000 eta K_0*- PHSP; -0.000009100 eta K_2*- PHSP; -0.000024000 omega K_0*- PHSP; -0.000021000 omega K_2*- PHSP; -0.000010100 anti-K*0 pi- PHSP; -0.000001070 f_2 K- PHSP; -0.000005600 anti-K_2*0 pi- PHSP; -0.000001200 K*- anti-K*0 PHSP; -0.000006100 phi K_1- PHSP; -0.000008400 phi K_2*- PHSP; -0.000007900 eta K- gamma PHSP; +0.002024389 anti-u u d anti-u PYTHIA 48; +0.000069145 anti-d d d anti-u PYTHIA 48; +0.000088900 anti-s s d anti-u PYTHIA 48; +0.002172556 anti-u u s anti-u PYTHIA 48; +0.001777537 anti-d d s anti-u PYTHIA 48; +0.001481296 anti-s s s anti-u PYTHIA 48; +0.004937500 s anti-u PYTHIA 32; + +0.000550000 D0 K- K0 PHSP; +0.000750000 D0 K- K*0 PHSP; +0.018000000 D*0 pi+ pi- pi- pi0 PHSP; +0.005700000 D*0 pi- pi- pi- pi+ pi+ PHSP; +0.002600000 D*+ pi- pi- pi- pi+ PHSP; +0.000180000 D_s+ pi- K- PHSP; +0.000145000 D_s*+ pi- K- PHSP; +0.000011000 D_s+ K- K- PHSP; +0.000018000 eta K_0*- PHSP; +0.000009100 eta K_2*- PHSP; +0.000024000 omega K_0*- PHSP; +0.000021000 omega K_2*- PHSP; +0.000010100 anti-K*0 pi- PHSP; +0.000001070 f_2 K- PHSP; +0.000005600 anti-K_2*0 pi- PHSP; +0.000001200 K*- anti-K*0 PHSP; +0.000006100 phi K_1- PHSP; +0.000008400 phi K_2*- PHSP; +0.000007900 eta K- gamma PHSP; 0.000002900 eta' K- gamma PHSP; #[Reconstructed PDG2016] -0.000002700 phi K- gamma PHSP; -0.000001620 anti-p- p+ pi- PHSP; -0.000005900 anti-p- p+ K- PHSP; -0.000003600 anti-p- p+ K*- PHSP; -0.000002500 anti-p- Lambda0 gamma PHSP; -0.000003000 anti-p- Lambda0 pi0 PHSP; -0.000000000 anti-p- Lambda0 pi- pi+ PHSP; -0.000004800 anti-p- Lambda0 rho0 PHSP; -0.000002000 anti-p- Lambda0 f_2 PHSP; -0.000003400 anti-Lambda0 Lambda0 K- PHSP; -0.000002200 anti-Lambda0 Lambda0 K*- PHSP; -0.000280000 Lambda_c+ anti-p- pi- PHSP; -0.001800000 Lambda_c+ anti-p- pi- pi0 PHSP; -0.002300000 Lambda_c+ anti-p- pi- pi- pi+ PHSP; -0.000035000 Sigma_c0 anti-p- PHSP; -0.000440000 Sigma_c0 anti-p- pi0 PHSP; -0.000440000 Sigma_c0 anti-p- pi+ pi- PHSP; -0.000280000 Sigma_c++ anti-p- pi- pi- PHSP; +0.000002700 phi K- gamma PHSP; +0.000001620 anti-p- p+ pi- PHSP; +0.000005900 anti-p- p+ K- PHSP; +0.000003600 anti-p- p+ K*- PHSP; +0.000002500 anti-p- Lambda0 gamma PHSP; +0.000003000 anti-p- Lambda0 pi0 PHSP; +0.000000000 anti-p- Lambda0 pi- pi+ PHSP; +0.000004800 anti-p- Lambda0 rho0 PHSP; +0.000002000 anti-p- Lambda0 f_2 PHSP; +0.000003400 anti-Lambda0 Lambda0 K- PHSP; +0.000002200 anti-Lambda0 Lambda0 K*- PHSP; +0.000280000 Lambda_c+ anti-p- pi- PHSP; +0.001800000 Lambda_c+ anti-p- pi- pi0 PHSP; +0.002300000 
Lambda_c+ anti-p- pi- pi- pi+ PHSP; +0.000035000 Sigma_c0 anti-p- PHSP; +0.000440000 Sigma_c0 anti-p- pi0 PHSP; +0.000440000 Sigma_c0 anti-p- pi+ pi- PHSP; +0.000280000 Sigma_c++ anti-p- pi- pi- PHSP; # Removing K pi pi gamma and K*0 pi gamma to avoid double counting with higher K states -#0.000007600 K- pi+ pi- gamma PHSP; -#0.000020000 anti-K*0 pi- gamma PHSP; -#0.000046000 anti-K0 pi- pi0 gamma PHSP; +#0.000007600 K- pi+ pi- gamma PHSP; +#0.000020000 anti-K*0 pi- gamma PHSP; +#0.000046000 anti-K0 pi- pi0 gamma PHSP; Enddecay @@ -5131,7 +5131,7 @@ Decay B_s0 # Sum = 11.44% # 2-body = 4.72% # more-body = 6.72% -0.0104 D_s- D_s+ PHSP; +0.0104 D_s- D_s+ PHSP; 0.0099 D_s*+ D_s- SVS; 0.0099 D_s*- D_s+ SVS; 0.0197 D_s*- D_s*+ SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; @@ -5224,7 +5224,7 @@ Decay B_s0 #[Updated BR according to PDG2017] 0.00033 J/psi eta' SVS; 0.00040 J/psi eta SVS; -0.00108 J/psi phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; +0.00108 J/psi phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; 0.00008 J/psi K0 SVS; 0.00079 J/psi K- K+ PHSP; 0.00070 J/psi anti-K0 K0 PHSP; @@ -5246,7 +5246,7 @@ Decay B_s0 # psi' = 0.34% CLNS 94/1315 0.000129 psi(2S) eta' SVS; 0.000330 psi(2S) eta SVS; -0.000540 psi(2S) phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; +0.000540 psi(2S) phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; 0.0003 psi(2S) K- K+ PHSP; 0.0003 psi(2S) anti-K0 K0 PHSP; 0.0003 psi(2S) K0 K- pi+ PHSP; @@ -5356,14 +5356,14 @@ Decay B_s0 #0.0037 D_s2*- rho+ STV; #whb: STV does not exist yet # 0.0027 D_s*- pi+ SVS; -0.0032 D_s- pi+ PHSP; +0.0032 D_s- pi+ PHSP; 0.0073 rho+ D_s- SVS; 0.0070 D_s*- rho+ SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; # 0.0085 a_1+ D_s- SVS; 0.0009 D_s- rho0 pi+ PHSP; 0.0009 D_s- rho+ pi0 PHSP; -0.0084 D_s- pi- pi+ pi+ PHSP; +0.0084 D_s- pi- pi+ pi+ PHSP; 0.0009 D_s- pi0 pi+ pi0 PHSP; # 0.0122 D_s*- a_1+ SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; @@ -5392,31 +5392,31 @@ Decay B_s0 # Start with B to baryons: # fkw 3/28/01 I don't know what I'm doing here!!! This needs to be checked!!! 
# Mark Whitehead 30/4/2010 Weight PYTHIA to get Total BF=100% -0.019574780 anti-cs_0 ud_0 PYTHIA 23; -0.039129957 anti-cs_1 ud_1 PYTHIA 23; +0.019574780 anti-cs_0 ud_0 PYTHIA 23; +0.039129957 anti-cs_1 ud_1 PYTHIA 23; # # Next come external W-emission: -0.301256716 u anti-d anti-c s PYTHIA 48; -0.048443906 u anti-d anti-c s PYTHIA 25; +0.301256716 u anti-d anti-c s PYTHIA 48; +0.048443906 u anti-d anti-c s PYTHIA 25; # Now the internal W-emission: -0.019086636 u anti-c anti-d s PYTHIA 48; +0.019086636 u anti-c anti-d s PYTHIA 48; # Then some b->u external W-emission with upper vertex charm -0.003912996 c anti-s anti-u s PYTHIA 48; +0.003912996 c anti-s anti-u s PYTHIA 48; # and finally some cabibbo suppressed external and internal W-emission -0.014683536 u anti-s anti-c s PYTHIA 48; -0.002152148 u anti-s anti-c s PYTHIA 25; -0.000880424 u anti-c anti-s s PYTHIA 48; +0.014683536 u anti-s anti-c s PYTHIA 48; +0.002152148 u anti-s anti-c s PYTHIA 25; +0.000880424 u anti-c anti-s s PYTHIA 48; # and some c cbar d stuff as well as c cbar s -0.005391151 c anti-d anti-c s PYTHIA 13; -0.001468354 c anti-d anti-c s PYTHIA 13; +0.005391151 c anti-d anti-c s PYTHIA 13; +0.001468354 c anti-d anti-c s PYTHIA 13; # and some miscellaneous charmless stuff -0.003521696 u anti-u anti-d s PYTHIA 48; -0.000684774 d anti-d anti-d s PYTHIA 48; -0.000880424 s anti-s anti-d s PYTHIA 48; -0.001956498 u anti-u anti-s s PYTHIA 48; -0.001565198 d anti-d anti-s s PYTHIA 48; -0.001271724 s anti-s anti-s s PYTHIA 48; -0.004891245 anti-s s PYTHIA 32; +0.003521696 u anti-u anti-d s PYTHIA 48; +0.000684774 d anti-d anti-d s PYTHIA 48; +0.000880424 s anti-s anti-d s PYTHIA 48; +0.001956498 u anti-u anti-s s PYTHIA 48; +0.001565198 d anti-d anti-s s PYTHIA 48; +0.001271724 s anti-s anti-s s PYTHIA 48; +0.004891245 anti-s s PYTHIA 32; # fkw 5/10/00 the b->ulnu decays are loosely modelled according to B0 in EvtGen 0.000200 K- e+ nu_e ISGW2; @@ -5447,14 +5447,14 @@ Decay B_s0 #*********************************************************** # Mark Whitehead 30/4/2010 Update K+K- # PR LHCb update Br Bs -> K- K+ -0.000033000 K- K+ PHSP; +0.000033000 K- K+ PHSP; # PR LHCb 4/07/04 Split in KS/KL 0.0000000 anti-K0 K0 PHSP; 0.0000100 K_S0 K_S0 PHSP; 0.0000100 K_L0 K_L0 PHSP; # # PR LHCb 04/07/04 update BR -0.000004900 pi+ K- PHSP; +0.000004900 pi+ K- PHSP; 0.0000002 pi0 anti-K0 PHSP; # PR LHCb 4/07/04 add Bs -> pi+ pi- 0.00000001 pi+ pi- PHSP; @@ -5501,7 +5501,7 @@ Decay B_s0 0.000006 K*- K*+ SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; # PR LHCb Update BR 0.000004 anti-K*0 K*0 SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; -0.000014000 phi phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; +0.000014000 phi phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; 0.000000004 phi anti-K*0 SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; # decays that go via b->d penguins @@ -5510,7 +5510,7 @@ Decay B_s0 #PR LHCb 04/08/2004 add Bs -> tau+ tau- (BR not to take too seriously) 0.0000000020 tau+ tau- PHSP; # PR LHCb 04/05/2004 : add Bs -> phi gamma -0.000057000 phi gamma SVP_HELAMP 1.0 0.0 1.0 0.0; +0.000057000 phi gamma SVP_HELAMP 1.0 0.0 1.0 0.0; # PR LHCb 04/08/2004 : add Bs -> phi mu mu, phi e e 0.0000023 phi e+ e- BTOSLLALI; 0.0000023 phi mu+ mu- BTOSLLALI; @@ -5519,7 +5519,7 @@ Decay B_s0 #-------------- # 257.122e-6 for all the charmless hadronic # -0.000150000 D_s+ K- PHSP; +0.000150000 D_s+ K- PHSP; Enddecay @@ -5657,7 +5657,7 @@ Decay anti-B_s0 # exclusive # Sum = 0.09% 0.00015 D_s*+ K- SVS; -0.00015 D_s+ K- PHSP; +0.00015 D_s+ K- PHSP; 0.00030 D_s*+ K*- SVV_HELAMP 0.228 0.0 0.932 0.0 0.0283 
0.0; 0.00030 K*- D_s+ SVS; @@ -5808,14 +5808,14 @@ Decay anti-B_s0 #0.0037 D_s2*+ rho- STV; # whb: model doesn't exist # 0.0027 D_s*+ pi- SVS; -0.0032 D_s+ pi- PHSP; +0.0032 D_s+ pi- PHSP; 0.0073 rho- D_s+ SVS; 0.0070 D_s*+ rho- SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; # 0.0085 a_1- D_s+ SVS; 0.0009 D_s+ rho0 pi- PHSP; 0.0009 D_s+ rho- pi0 PHSP; -0.0084 D_s+ pi+ pi- pi- PHSP; +0.0084 D_s+ pi+ pi- pi- PHSP; 0.0009 D_s+ pi0 pi- pi0 PHSP; # 0.0122 D_s*+ a_1- SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; @@ -5844,31 +5844,31 @@ Decay anti-B_s0 # Start with B to baryons: # fkw 3/28/01 I don't know what I'm doing here!!! This needs to be checked!!! # Mark Whitehead 30/4/2010 Weighted PYHTIA to get total BF = 100% -0.019574780 cs_0 anti-ud_0 PYTHIA 23; -0.039129957 cs_1 anti-ud_1 PYTHIA 23; +0.019574780 cs_0 anti-ud_0 PYTHIA 23; +0.039129957 cs_1 anti-ud_1 PYTHIA 23; # # Next come external W-emission: -0.301256716 anti-u d c anti-s PYTHIA 48; -0.048443906 anti-u d c anti-s PYTHIA 25; +0.301256716 anti-u d c anti-s PYTHIA 48; +0.048443906 anti-u d c anti-s PYTHIA 25; # Now the internal W-emission: -0.019086636 anti-u c d anti-s PYTHIA 48; +0.019086636 anti-u c d anti-s PYTHIA 48; # Then some b->u external W-emission with upper vertex charm -0.003912996 anti-c s u anti-s PYTHIA 48; +0.003912996 anti-c s u anti-s PYTHIA 48; # and finally some cabibbo suppressed external and internal W-emission -0.014683536 anti-u s c anti-s PYTHIA 48; -0.002152148 anti-u s c anti-s PYTHIA 25; -0.000880424 anti-u c s anti-s PYTHIA 48; +0.014683536 anti-u s c anti-s PYTHIA 48; +0.002152148 anti-u s c anti-s PYTHIA 25; +0.000880424 anti-u c s anti-s PYTHIA 48; # and some c cbar d stuff as well as c cbar s -0.005391151 anti-c d c anti-s PYTHIA 13; -0.001468354 anti-c d c anti-s PYTHIA 13; +0.005391151 anti-c d c anti-s PYTHIA 13; +0.001468354 anti-c d c anti-s PYTHIA 13; # and some miscellaneous charmless stuff -0.003521696 anti-u u d anti-s PYTHIA 48; -0.000684774 anti-d d d anti-s PYTHIA 48; -0.000880424 anti-s s d anti-s PYTHIA 48; -0.001956498 anti-u u s anti-s PYTHIA 48; -0.001565198 anti-d d s anti-s PYTHIA 48; -0.001271724 anti-s s s anti-s PYTHIA 48; -0.004891245 s anti-s PYTHIA 32; +0.003521696 anti-u u d anti-s PYTHIA 48; +0.000684774 anti-d d d anti-s PYTHIA 48; +0.000880424 anti-s s d anti-s PYTHIA 48; +0.001956498 anti-u u s anti-s PYTHIA 48; +0.001565198 anti-d d s anti-s PYTHIA 48; +0.001271724 anti-s s s anti-s PYTHIA 48; +0.004891245 s anti-s PYTHIA 32; # fkw 5/10/00 the b->ulnu decays are loosely modelled according to B0 in EvtGen 0.000200 K+ e- anti-nu_e ISGW2; @@ -5899,14 +5899,14 @@ Decay anti-B_s0 #*********************************************************** # Mark Whitehead 30/4/2010 Updated K+K- # PR LHCb 04/07/04 update BR -0.000033000 K- K+ PHSP; +0.000033000 K- K+ PHSP; # PR LHCb 04/07/04 split into KS/KL 0.0000000 anti-K0 K0 PHSP; 0.0000100 K_S0 K_S0 PHSP; 0.0000100 K_L0 K_L0 PHSP; # # PR LHCb 04/07/04 update BR -0.000004900 pi- K+ PHSP; +0.000004900 pi- K+ PHSP; 0.0000002 pi0 K0 PHSP; # PR LHCb 04/07/04 add Bs->pi+ pi- 0.00000001 pi+ pi- PHSP; @@ -5953,7 +5953,7 @@ Decay anti-B_s0 0.000006 K*- K*+ SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; # PR LHCb 04/07/04 Update BR 0.000004 anti-K*0 K*0 SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; -0.000014000 phi phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; +0.000014000 phi phi SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; 0.000000004 phi K*0 SVV_HELAMP 1.0 0.0 1.0 0.0 1.0 0.0; # decays that go via b->d penguins @@ -5962,7 +5962,7 @@ Decay anti-B_s0 #PR LHCb 04/08/2004 add Bs -> tau+ tau- (BR not to 
take too seriously) 0.0000000020 tau- tau+ PHSP; # PR LHCb 04/05/2004 : add Bs -> phi gamma -0.000057000 phi gamma SVP_HELAMP 1.0 0.0 1.0 0.0; +0.000057000 phi gamma SVP_HELAMP 1.0 0.0 1.0 0.0; # PR LHCb 04/08/2004 : add Bs -> phi mu mu, phi e e 0.0000023 phi e- e+ BTOSLLALI; 0.0000023 phi mu- mu+ BTOSLLALI; @@ -5971,7 +5971,7 @@ Decay anti-B_s0 #-------------- # 257.122e-6 for all the charmless hadronic # -0.000150000 D_s- K+ PHSP; +0.000150000 D_s- K+ PHSP; Enddecay @@ -6241,9 +6241,9 @@ Decay D+ 0.000237000 K+ pi0 PHSP; 0.000332000 K+ pi+ pi- PHSP; 0.000089000 K+ K+ K- PHSP; -0.001720000 K- rho0 pi+ pi+ PHSP; -0.001600000 eta' pi+ pi0 PHSP; -0.000210000 K+ rho0 PHSP; +0.001720000 K- rho0 pi+ pi+ PHSP; +0.001600000 eta' pi+ pi0 PHSP; +0.000210000 K+ rho0 PHSP; Enddecay @@ -6388,9 +6388,9 @@ Decay D- 0.000237000 K- pi0 PHSP; 0.000332000 K- pi+ pi- PHSP; 0.000089000 K- K+ K- PHSP; -0.001720000 K+ rho0 pi- pi- PHSP; -0.001600000 eta' pi- pi0 PHSP; -0.000210000 K- rho0 PHSP; +0.001720000 K+ rho0 pi- pi- PHSP; +0.001600000 eta' pi- pi0 PHSP; +0.000210000 K- rho0 PHSP; Enddecay diff --git a/decaylanguage/data/__init__.py b/decaylanguage/data/__init__.py index 85964dd6..31b25323 100644 --- a/decaylanguage/data/__init__.py +++ b/decaylanguage/data/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- try: from importlib.resources import open_text except ImportError: diff --git a/decaylanguage/dec/__init__.py b/decaylanguage/dec/__init__.py index d921b2da..a07a7890 100644 --- a/decaylanguage/dec/__init__.py +++ b/decaylanguage/dec/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .dec import DecFileParser from .enums import known_decay_models diff --git a/decaylanguage/dec/dec.py b/decaylanguage/dec/dec.py index c65ea2a2..5bc6cedf 100644 --- a/decaylanguage/dec/dec.py +++ b/decaylanguage/dec/dec.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -84,13 +85,15 @@ class DecFileParser(object): >>> dfp.parse() """ - __slots__ = ("_grammar", - "_grammar_info", - "_dec_file_names", - "_dec_file", - "_parsed_dec_file", - "_parsed_decays", - "_include_ccdecays") + __slots__ = ( + "_grammar", + "_grammar_info", + "_dec_file_names", + "_dec_file", + "_parsed_dec_file", + "_parsed_decays", + "_include_ccdecays", + ) def __init__(self, *filenames): """ @@ -101,7 +104,7 @@ def __init__(self, *filenames): filenames: non-keyworded variable length argument Input .dec decay file name(s). 
""" - self._grammar = None # Loaded Lark grammar definition file + self._grammar = None # Loaded Lark grammar definition file self._grammar_info = None # Name of Lark grammar definition file # Name(s) of the input decay file(s) @@ -115,15 +118,17 @@ def __init__(self, *filenames): if not os.path.exists(filename): raise FileNotFoundError("'{0}'!".format(filename)) - with open(filename, 'r') as file: + with open(filename, "r") as file: for line in file: # We need to strip the unicode byte ordering if present before checking for * - beg = line.lstrip('\ufeff').lstrip() + beg = line.lstrip("\ufeff").lstrip() # Make sure one discards all lines "End" # in intermediate files, to avoid a parsing error - if not ( beg.startswith('End') and not beg.startswith('Enddecay')): + if not ( + beg.startswith("End") and not beg.startswith("Enddecay") + ): stream.write(line) - stream.write('\n') + stream.write("\n") stream.seek(0) # Content of input file(s) @@ -133,7 +138,7 @@ def __init__(self, *filenames): self._dec_file = None self._parsed_dec_file = None # Parsed decay file - self._parsed_decays = None # Particle decays found in the decay file + self._parsed_decays = None # Particle decays found in the decay file # By default, consider charge-conjugate decays when parsing self._include_ccdecays = True @@ -152,7 +157,7 @@ def from_string(cls, filecontent): stream.seek(0) _cls = cls() - _cls._dec_file_names = '' + _cls._dec_file_names = "" _cls._dec_file = stream.read() return _cls @@ -184,11 +189,12 @@ def parse(self, include_ccdecays=True): # Retrieve all info on the default Lark grammar and its default options, # effectively loading it opts = self.grammar_info() - extraopts = dict((k, v) for k, v in opts.items() - if k not in ('lark_file', 'parser','lexer')) + extraopts = dict( + (k, v) for k, v in opts.items() if k not in ("lark_file", "parser", "lexer") + ) # Instantiate the Lark parser according to chosen settings - parser = Lark(self.grammar(), parser=opts['parser'], lexer=opts['lexer']) + parser = Lark(self.grammar(), parser=opts["parser"], lexer=opts["lexer"]) self._parsed_dec_file = parser.parse(self._dec_file) @@ -243,7 +249,7 @@ def grammar_info(self): return self._grammar_info - def load_grammar(self, filename=None, parser='lalr', lexer='standard', **options): + def load_grammar(self, filename=None, parser="lalr", lexer="standard", **options): """ Load a Lark grammar definition file, either the default one, or a user-specified one, optionally setting Lark parsing options. 
@@ -264,7 +270,7 @@ def load_grammar(self, filename=None, parser='lalr', lexer='standard', **options """ if filename is None: - filename = 'decfile.lark' + filename = "decfile.lark" with data.open_text(data, filename) as f: self._grammar = f.read() else: @@ -273,7 +279,9 @@ def load_grammar(self, filename=None, parser='lalr', lexer='standard', **options self._grammar = open(filename).read() - self._grammar_info = dict(lark_file=filename, parser=parser, lexer=lexer, **options) + self._grammar_info = dict( + lark_file=filename, parser=parser, lexer=lexer, **options + ) @property def grammar_loaded(self): @@ -413,7 +421,10 @@ def _add_decays_to_be_copied(self): decays2copy = self.dict_decays2copy() # match name -> position in list self._parsed_decays - name2treepos = {t.children[0].children[0].value:i for i, t in enumerate(self._parsed_decays)} + name2treepos = { + t.children[0].children[0].value: i + for i, t in enumerate(self._parsed_decays) + } # Make the copies taking care to change the name of the mother particle copied_decays = [] @@ -428,7 +439,9 @@ def _add_decays_to_be_copied(self): misses.append(decay2copy) if len(misses) > 0: msg = """\nCorresponding 'Decay' statement for 'CopyDecay' statement(s) of following particle(s) not found:\n{0}. -Skipping creation of these copied decay trees.""".format('\n'.join([m for m in misses])) +Skipping creation of these copied decay trees.""".format( + "\n".join([m for m in misses]) + ) warnings.warn(msg) # Actually add all these copied decays to the list of decays! @@ -463,13 +476,16 @@ def _add_charge_conjugate_decays(self): # Cross-check - make sure charge conjugate decays are not defined # with both 'Decay' and 'CDecay' statements! - mother_names_decays = [get_decay_mother_name(tree) - for tree in self._parsed_decays] + mother_names_decays = [ + get_decay_mother_name(tree) for tree in self._parsed_decays + ] duplicates = [n for n in mother_names_ccdecays if n in mother_names_decays] if len(duplicates) > 0: msg = """The following particles are defined in the input .dec file with both 'Decay' and 'CDecay': {0}! -The 'CDecay' definition(s) will be ignored ...""".format(', '.join(d for d in duplicates)) +The 'CDecay' definition(s) will be ignored ...""".format( + ", ".join(d for d in duplicates) + ) warnings.warn(msg) # If that's the case, proceed using the decay definitions specified @@ -490,7 +506,10 @@ def _add_charge_conjugate_decays(self): dict_cc_names = self.dict_charge_conjugates() # match name -> position in list self._parsed_decays - name2treepos = {t.children[0].children[0].value:i for i, t in enumerate(self._parsed_decays)} + name2treepos = { + t.children[0].children[0].value: i + for i, t in enumerate(self._parsed_decays) + } trees_to_conjugate = [] misses = [] @@ -503,10 +522,12 @@ def _add_charge_conjugate_decays(self): misses.append(ccname) if len(misses) > 0: msg = """\nCorresponding 'Decay' statement for 'CDecay' statement(s) of following particle(s) not found:\n{0}. 
-Skipping creation of these charge-conjugate decay trees.""".format('\n'.join([m for m in misses])) +Skipping creation of these charge-conjugate decay trees.""".format( + "\n".join([m for m in misses]) + ) warnings.warn(msg) - cdecays = [ tree.__deepcopy__(None) for tree in trees_to_conjugate] + cdecays = [tree.__deepcopy__(None) for tree in trees_to_conjugate] # Finally, perform all particle -> anti(particle) replacements, # taking care of charge conjugate decays defined via aliases, @@ -516,15 +537,21 @@ def _is_not_self_conj(t): mname = t.children[0].children[0].value if Particle.from_evtgen_name(mname).is_self_conjugate: msg = """Found 'CDecay' statement for self-conjugate particle {0}. This is a bug! -Skipping creation of charge-conjugate decay Tree.""".format(mname) +Skipping creation of charge-conjugate decay Tree.""".format( + mname + ) warnings.warn(msg) return False else: return True except: return True - [ChargeConjugateReplacement(charge_conj_defs=dict_cc_names).visit(t) - for t in cdecays if _is_not_self_conj(t)] + + [ + ChargeConjugateReplacement(charge_conj_defs=dict_cc_names).visit(t) + for t in cdecays + if _is_not_self_conj(t) + ] # ... and add all these charge-conjugate decays to the list of decays! self._parsed_decays.extend(cdecays) @@ -545,9 +572,11 @@ def _check_parsed_decays(self): lmn = self.list_decay_mother_names() duplicates = [] if self.number_of_decays != len(set(lmn)): - duplicates = set([n for n in lmn if lmn.count(n)>1]) + duplicates = set([n for n in lmn if lmn.count(n) > 1]) msg = """The following particle(s) is(are) redefined in the input .dec file with 'Decay': {0}! -All but the first occurrence will be discarded/removed ...""".format(', '.join(d for d in duplicates)) +All but the first occurrence will be discarded/removed ...""".format( + ", ".join(d for d in duplicates) + ) warnings.warn(msg) # Create a list with all occurrences to remove @@ -555,8 +584,8 @@ def _check_parsed_decays(self): duplicates_to_remove = [] for item in duplicates: c = lmn.count(item) - if c>1: - duplicates_to_remove.extend([item]*(c-1)) + if c > 1: + duplicates_to_remove.extend([item] * (c - 1)) # Actually remove all but the first occurrence of duplicate decays for tree in reversed(self._parsed_decays): @@ -594,7 +623,7 @@ def _find_decay_modes(self, mother): for decay_Tree in self._parsed_decays: if get_decay_mother_name(decay_Tree) == mother: - return tuple(decay_Tree.find_data('decayline')) + return tuple(decay_Tree.find_data("decayline")) raise DecayNotFound("Decays of particle '%s' not found in .dec file!" % mother) @@ -620,8 +649,10 @@ def list_decay_modes(self, mother, pdg_name=False): if pdg_name: mother = PDG2EvtGenNameMap[mother] - return [get_final_state_particle_names(mode) - for mode in self._find_decay_modes(mother)] + return [ + get_final_state_particle_names(mode) + for mode in self._find_decay_modes(mother) + ] def _decay_mode_details(self, decay_mode): """ @@ -636,10 +667,9 @@ def _decay_mode_details(self, decay_mode): return (bf, fsp_names, model, model_params) - def print_decay_modes(self, mother, - pdg_name=False, - print_model=True, - ascending=False): + def print_decay_modes( + self, mother, pdg_name=False, print_model=True, ascending=False + ): """ Pretty print of the decay modes of a given particle. 
@@ -666,18 +696,31 @@ def print_decay_modes(self, mother, if print_model: for dm in dms: dm_details = self._decay_mode_details(dm) - l.append((dm_details[0],'%-50s %15s %s' % (' '.\ - join(p for p in dm_details[1]), dm_details[2], dm_details[3]))) + l.append( + ( + dm_details[0], + "%-50s %15s %s" + % ( + " ".join(p for p in dm_details[1]), + dm_details[2], + dm_details[3], + ), + ) + ) else: for dm in dms: fsp_names = get_final_state_particle_names(dm) - l.append((get_branching_fraction(dm), - '%-50s' % (' '.join(p for p in fsp_names)))) + l.append( + ( + get_branching_fraction(dm), + "%-50s" % (" ".join(p for p in fsp_names)), + ) + ) l.sort(key=operator.itemgetter(0), reverse=(not ascending)) for bf, info in l: - print('%12g : %s' % (bf, info)) + print("%12g : %s" % (bf, info)) def build_decay_chains(self, mother, stable_particles=[]): """ @@ -734,14 +777,14 @@ def build_decay_chains(self, mother, stable_particles=[]): >>> p.build_decay_chains('D+', stable_particles=['pi0']) {'D+': [{'bf': 1.0, 'fs': ['K-', 'pi+', 'pi+', 'pi0'], 'model': 'PHSP', 'model_params': ''}]} """ - keys = ('bf', 'fs', 'model', 'model_params') + keys = ("bf", "fs", "model", "model_params") info = list() - for dm in (self._find_decay_modes(mother)): + for dm in self._find_decay_modes(mother): list_dm_details = self._decay_mode_details(dm) - d = dict(zip(keys,list_dm_details)) + d = dict(zip(keys, list_dm_details)) - for i, fs in enumerate(d['fs']): + for i, fs in enumerate(d["fs"]): if fs in stable_particles: continue @@ -751,22 +794,24 @@ def build_decay_chains(self, mother, stable_particles=[]): _n_dms = len(self._find_decay_modes(fs)) _info = self.build_decay_chains(fs, stable_particles) - d['fs'][i] = _info + d["fs"][i] = _info except DecayNotFound: pass info.append(d) - info = {mother:info} + info = {mother: info} return info def __repr__(self): if self._parsed_dec_file is not None: return "<{self.__class__.__name__}: decfile(s)={decfile}, n_decays={n_decays}>".format( - self=self, decfile=self._dec_file_names, n_decays=self.number_of_decays) + self=self, decfile=self._dec_file_names, n_decays=self.number_of_decays + ) else: - return "<{self.__class__.__name__}: decfile(s)={decfile}>"\ - .format(self=self, decfile=self._dec_file_names) + return "<{self.__class__.__name__}: decfile(s)={decfile}>".format( + self=self, decfile=self._dec_file_names + ) def __str__(self): return repr(self) @@ -803,6 +848,7 @@ class DecayModelParamValueReplacement(Visitor): Tree(particle, [Token(LABEL, 'anti-B0')]), Tree(model, [Token(MODEL_NAME, 'VSS_BMIX'), Tree(model_options, [Token(LABEL, 507000000000.0)])])])]) """ + def __init__(self, define_defs=dict()): self.define_defs = define_defs @@ -817,7 +863,7 @@ def model_options(self, tree): """ Method for the rule (here, a replacement) we wish to implement. """ - assert tree.data == 'model_options' + assert tree.data == "model_options" for child in tree.children: self._replacement(child) @@ -857,6 +903,7 @@ class ChargeConjugateReplacement(Visitor): [Tree(value, [Token(SIGNED_NUMBER, '1.0')]), Tree(particle, [Token(LABEL, 'K+')]), Tree(particle, [Token(LABEL, 'pi-')]), Tree(model, [Token(MODEL_NAME, 'PHSP')])])]) """ + def __init__(self, charge_conj_defs=dict()): self.charge_conj_defs = charge_conj_defs @@ -864,12 +911,13 @@ def particle(self, tree): """ Method for the rule (here, a replacement) we wish to implement. 
""" - assert tree.data == 'particle' + assert tree.data == "particle" pname = tree.children[0].value ccpname = find_charge_conjugate_match(pname, self.charge_conj_defs) self.charge_conj_defs[pname] = ccpname tree.children[0].value = ccpname + def find_charge_conjugate_match(pname, dict_cc_names=dict()): """ Find the charge-conjugate particle name making use of user information @@ -890,6 +938,7 @@ def find_charge_conjugate_match(pname, dict_cc_names=dict()): return charge_conjugate_name(pname) + def get_decay_mother_name(decay_tree): """ Return the mother particle name for the decay mode defined @@ -900,7 +949,7 @@ def get_decay_mother_name(decay_tree): decay_tree: Lark Tree instance Input Tree satisfying Tree.data=='decay'. """ - if not isinstance(decay_tree, Tree) or decay_tree.data != 'decay': + if not isinstance(decay_tree, Tree) or decay_tree.data != "decay": raise RuntimeError("Input not an instance of a 'decay' Tree!") # For a 'decay' Tree, tree.children[0] is the mother particle Tree @@ -918,7 +967,7 @@ def get_branching_fraction(decay_mode): decay_mode: Lark Tree instance Input Tree satisfying Tree.data=='decayline'. """ - if not isinstance(decay_mode, Tree) or decay_mode.data != 'decayline': + if not isinstance(decay_mode, Tree) or decay_mode.data != "decayline": raise RuntimeError("Check your input, not an instance of a 'decayline' Tree!") # For a 'decayline' Tree, Tree.children[0] is the branching fraction Tree @@ -926,7 +975,9 @@ def get_branching_fraction(decay_mode): try: # the branching fraction value as a float return float(decay_mode.children[0].children[0].value) except RuntimeError: - raise RuntimeError("'decayline' Tree does not seem to have the usual structure. Check it.") + raise RuntimeError( + "'decayline' Tree does not seem to have the usual structure. Check it." + ) def get_final_state_particles(decay_mode): @@ -949,11 +1000,11 @@ def get_final_state_particles(decay_mode): [Tree(particle, [Token(LABEL, 'K+')]), Tree(particle, [Token(LABEL, 'K-')])] will be returned. """ - if not isinstance(decay_mode, Tree) or decay_mode.data != 'decayline': + if not isinstance(decay_mode, Tree) or decay_mode.data != "decayline": raise RuntimeError("Input not an instance of a 'decayline' Tree!") # list of Trees of final-state particles - return list(decay_mode.find_data('particle')) + return list(decay_mode.find_data("particle")) def get_final_state_particle_names(decay_mode): @@ -974,7 +1025,7 @@ def get_final_state_particle_names(decay_mode): Enddecay the list ['K+', 'K-'] will be returned. """ - if not isinstance(decay_mode, Tree) or decay_mode.data != 'decayline': + if not isinstance(decay_mode, Tree) or decay_mode.data != "decayline": raise RuntimeError("Input not an instance of a 'decayline' Tree!") fsps = get_final_state_particles(decay_mode) @@ -999,10 +1050,10 @@ def get_model_name(decay_mode): Enddecay the string 'SSD_CP' will be returned. """ - if not isinstance(decay_mode, Tree) or decay_mode.data != 'decayline': + if not isinstance(decay_mode, Tree) or decay_mode.data != "decayline": raise RuntimeError("Input not an instance of a 'decayline' Tree!") - lm = list(decay_mode.find_data('model')) + lm = list(decay_mode.find_data("model")) return str(lm[0].children[0].value) @@ -1034,10 +1085,10 @@ def get_model_parameters(decay_mode): ['DtoKpipipi_v1'] will be returned. 
""" - if not isinstance(decay_mode, Tree) or decay_mode.data != 'decayline': + if not isinstance(decay_mode, Tree) or decay_mode.data != "decayline": raise RuntimeError("Input not an instance of a 'decayline' Tree!") - lmo = list(decay_mode.find_data('model_options')) + lmo = list(decay_mode.find_data("model_options")) def _value(t): try: @@ -1045,7 +1096,7 @@ def _value(t): except AttributeError: return t.value - return [_value(tree) for tree in lmo[0].children] if len(lmo) == 1 else '' + return [_value(tree) for tree in lmo[0].children] if len(lmo) == 1 else "" def get_decays(parsed_file): @@ -1061,11 +1112,11 @@ def get_decays(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") try: - return list(parsed_file.find_data('decay')) + return list(parsed_file.find_data("decay")) except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1081,11 +1132,16 @@ def get_charge_conjugate_decays(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") try: - return sorted([tree.children[0].children[0].value for tree in parsed_file.find_data('cdecay')]) + return sorted( + [ + tree.children[0].children[0].value + for tree in parsed_file.find_data("cdecay") + ] + ) except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1102,12 +1158,14 @@ def get_decays2copy_statements(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") try: - return {tree.children[0].children[0].value:tree.children[1].children[0].value - for tree in parsed_file.find_data('copydecay')} + return { + tree.children[0].children[0].value: tree.children[1].children[0].value + for tree in parsed_file.find_data("copydecay") + } except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1122,12 +1180,16 @@ def get_definitions(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") try: - return {tree.children[0].children[0].value:float(tree.children[1].children[0].value) - for tree in parsed_file.find_data('define')} + return { + tree.children[0] + .children[0] + .value: float(tree.children[1].children[0].value) + for tree in parsed_file.find_data("define") + } except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1142,12 +1204,14 @@ def get_aliases(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") try: - return {tree.children[0].children[0].value:tree.children[1].children[0].value - for tree in parsed_file.find_data('alias')} + return { + tree.children[0].children[0].value: tree.children[1].children[0].value + for tree in parsed_file.find_data("alias") + } except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1163,12 +1227,14 @@ def get_charge_conjugate_defs(parsed_file): parsed_file: Lark Tree instance Input parsed file. 
""" - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") try: - return {tree.children[0].children[0].value:tree.children[1].children[0].value - for tree in parsed_file.find_data('chargeconj')} + return { + tree.children[0].children[0].value: tree.children[1].children[0].value + for tree in parsed_file.find_data("chargeconj") + } except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1187,7 +1253,7 @@ def get_pythia_definitions(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") def str_or_float(arg): @@ -1197,8 +1263,12 @@ def str_or_float(arg): return arg try: - return {'{0}:{1}'.format(tree.children[0].value, tree.children[1].value):str_or_float(tree.children[2].value) - for tree in parsed_file.find_data('pythia_def')} + return { + "{0}:{1}".format( + tree.children[0].value, tree.children[1].value + ): str_or_float(tree.children[2].value) + for tree in parsed_file.find_data("pythia_def") + } except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1217,14 +1287,17 @@ def get_jetset_definitions(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") - get_jetsetpar = re.compile(r""" + get_jetsetpar = re.compile( + r""" ^ # Beginning of string (?P [a-zA-Z]+? ) # One or more characters, non-greedy \( (?P \d+ ) \) # parameter number in () - """, re.VERBOSE) + """, + re.VERBOSE, + ) def to_int_or_float(n): """ @@ -1242,14 +1315,16 @@ def to_int_or_float(n): try: dict_params = {} - for tree in parsed_file.find_data('jetset_def'): + for tree in parsed_file.find_data("jetset_def"): param = get_jetsetpar.match(tree.children[0].value).groupdict() try: - dict_params[param['pname']].update( - {int(param['pnumber']):to_int_or_float(tree.children[1].value)}) + dict_params[param["pname"]].update( + {int(param["pnumber"]): to_int_or_float(tree.children[1].value)} + ) except KeyError: - dict_params[param['pname']] =\ - {int(param['pnumber']):to_int_or_float(tree.children[1].value)} + dict_params[param["pname"]] = { + int(param["pnumber"]): to_int_or_float(tree.children[1].value) + } return dict_params except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1270,12 +1345,12 @@ def get_lineshape_definitions(parsed_file): parsed_file: Lark Tree instance Input parsed file. """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") try: d = list() - for tree in parsed_file.find_data('setlspw'): + for tree in parsed_file.find_data("setlspw"): particles = [p.children[0].value for p in tree.children[:-1]] val = int(tree.children[3].children[0].value) d.append((particles, val)) @@ -1302,21 +1377,21 @@ def get_global_photos_flag(parsed_file): out: PhotosEnum, default=PhotosEnum.no PhotosEnum.yes / PhotosEnum.no if PHOTOS enabled / disabled """ - if not isinstance(parsed_file, Tree) : + if not isinstance(parsed_file, Tree): raise RuntimeError("Input not an instance of a Tree!") # Check if the flag is not set more than once, just in case ... 
- tree = tuple(parsed_file.find_data('global_photos')) + tree = tuple(parsed_file.find_data("global_photos")) if len(tree) == 0: return PhotosEnum.no elif len(tree) > 1: - warnings.warn("PHOTOS flag re-set! Using flag set in last ...") + warnings.warn("PHOTOS flag re-set! Using flag set in last ...") tree = tree[-1] try: val = tree.children[0].data - return PhotosEnum.yes if val=='yes' else PhotosEnum.no + return PhotosEnum.yes if val == "yes" else PhotosEnum.no except: RuntimeError("Input parsed file does not seem to have the expected structure.") @@ -1324,38 +1399,56 @@ def get_global_photos_flag(parsed_file): class TreeToDec(Transformer): def yes(self, items): return True + def no(self, items): return False + def global_photos(self, items): - item, = items + (item,) = items return PhotosEnum.yes if item else PhotosEnum.no + def value(self, items): - item, = items + (item,) = items return float(item) + def label(self, items): - item, = items + (item,) = items return str(item) + def photos(self, items): return PhotosEnum.yes def define(transformed): - return {x.children[0]:x.children[1] for x in transformed.find_data('define')} + return {x.children[0]: x.children[1] for x in transformed.find_data("define")} + + def pythia_def(transformed): - return [x.children for x in transformed.find_data('pythia_def')] + return [x.children for x in transformed.find_data("pythia_def")] + + def alias(transformed): - return {x.children[0]:x.children[1] for x in transformed.find_data('alias')} + return {x.children[0]: x.children[1] for x in transformed.find_data("alias")} + def chargeconj(transformed): - return {x.children[0]:x.children[1] for x in transformed.find_data('chargeconj')} + return {x.children[0]: x.children[1] for x in transformed.find_data("chargeconj")} + # Commands def global_photos(transformed): - return {x.children[0]:x.children[1] for x in transformed.find_data('global_photos')} + return { + x.children[0]: x.children[1] for x in transformed.find_data("global_photos") + } + def decay(transformed): - return Tree('decay', list(transformed.find_data('decay'))) + return Tree("decay", list(transformed.find_data("decay"))) + + def cdecay(transformed): - return [x.children[0] for x in transformed.find_data('cdecay')] + return [x.children[0] for x in transformed.find_data("cdecay")] + + def setlspw(transformed): - return list(transformed.find_data('setlspw')) + return list(transformed.find_data("setlspw")) diff --git a/decaylanguage/dec/decparser.py b/decaylanguage/dec/decparser.py index c4a881cd..532c44d6 100644 --- a/decaylanguage/dec/decparser.py +++ b/decaylanguage/dec/decparser.py @@ -1,10 +1,11 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import -#try: +# try: # import cmd2 as cmd -#except: +# except: import cmd import os @@ -13,22 +14,58 @@ DIR = os.path.dirname(__file__) -known_models = ['VSS', 'VSS_BMIX', 'PHSP', 'HELAMP', 'JETSET', 'PHOTOS', - 'ISGW2', 'HQET', 'GOITY_ROBERTS', 'VUB', 'SVS', - 'SVV_HELAMP', 'BTOXSGAMMA', 'BTOSLLBALL', 'BTOXSLL', - 'SSD_CP', 'SVV_CP', 'BTO3PI_CP', 'STS', 'JSCONT', 'SLN', - 'CB3PI-P00', 'CB3PI-MPP', 'VSP_PWAVE', 'TAUSCALARNU', - 'TAUHADNU', 'D_DALITZ', 'VLL', 'TSS', 'VVP', 'VVS_PWAVE', - 'PARTWAVE', 'TVS_PWAVE', 'OMEGA_DALITZ', 'ETA_DALITZ', - 'PI0_DALITZ', 'TAUVECTORNU', 'SVP_HELAMP', 'VVPIPI', - 'VPHOTOV', 'PYTHIA', 'BTOSLLALI' - ] +known_models = [ + "VSS", + "VSS_BMIX", + "PHSP", + "HELAMP", + "JETSET", + "PHOTOS", + "ISGW2", + "HQET", + "GOITY_ROBERTS", + "VUB", + "SVS", + "SVV_HELAMP", + 
"BTOXSGAMMA", + "BTOSLLBALL", + "BTOXSLL", + "SSD_CP", + "SVV_CP", + "BTO3PI_CP", + "STS", + "JSCONT", + "SLN", + "CB3PI-P00", + "CB3PI-MPP", + "VSP_PWAVE", + "TAUSCALARNU", + "TAUHADNU", + "D_DALITZ", + "VLL", + "TSS", + "VVP", + "VVS_PWAVE", + "PARTWAVE", + "TVS_PWAVE", + "OMEGA_DALITZ", + "ETA_DALITZ", + "PI0_DALITZ", + "TAUVECTORNU", + "SVP_HELAMP", + "VVPIPI", + "VPHOTOV", + "PYTHIA", + "BTOSLLALI", +] import os -if 'C3_DATA' in os.environ: - defdecfile = '%s/DECAY.DEC' % os.environ['C3_DATA'] + +if "C3_DATA" in os.environ: + defdecfile = "%s/DECAY.DEC" % os.environ["C3_DATA"] else: - defdecfile = os.path.join(DIR, '../data/DECAY_LHCB.DEC') + defdecfile = os.path.join(DIR, "../data/DECAY_LHCB.DEC") + class DaughterList(dict): def __init__(self): @@ -37,13 +74,15 @@ def __init__(self): def add(self, daughter): self[daughter] = self.get(daughter, 0) + class AllowedDecays(object): def __init__(self, particle): self.decay_of = particle self.decays = [] + class Decay(object): - def __init__(self, bf = 0, daughters = None): + def __init__(self, bf=0, daughters=None): if daughters == None: daughters = DaughterList() self.bf = bf @@ -52,44 +91,46 @@ def __init__(self, bf = 0, daughters = None): def daughters_to_string(self): strs = [] for dau in self.daughters: - strs += [dau]*self.daughters[dau] + strs += [dau] * self.daughters[dau] + def sortkey(x): p = Particle.from_search_list(name=x) if len(p) == 1: return p[0] else: return 100000 + strs.sort(key=sortkey, reverse=True) - return ' '.join(strs) + return " ".join(strs) class decparser(cmd.Cmd): def __init__(self, stdin=None, stdout=None): - self.prompt = '' - self.file_parse_status = '' + self.prompt = "" + self.file_parse_status = "" self.current_decay_top = None self.ignoreUntilSemicolon = False self.decaylist = {} self.cdecay_delayed = [] - cmd.Cmd.__init__(self, 'tab', stdin, stdout) + cmd.Cmd.__init__(self, "tab", stdin, stdout) def default(self, line): - if line[:1] == '#' or 'Photos' in line: + if line[:1] == "#" or "Photos" in line: pass elif self.ignoreUntilSemicolon: - if ';' in line: + if ";" in line: self.ignoreUntilSemicolon = False - elif self.file_parse_status == 'Decay': + elif self.file_parse_status == "Decay": self.addline(line) else: cmd.Cmd.default(self, line) def emptyline(self): - return '' + return "" -## def precmd(self, line): -## print line -## return line + ## def precmd(self, line): + ## print line + ## return line def do_EOF(self, line): return True @@ -110,48 +151,50 @@ def do_SetLineshapePW(self, line): pass def do_ModelAlias(self, line): - if not ';' in line: + if not ";" in line: self.ignoreUntilSemicolon = True def do_Decay(self, line): - if self.file_parse_status == 'Decay': - raise Exception('Repeated Decay statement: %s' % line) + if self.file_parse_status == "Decay": + raise Exception("Repeated Decay statement: %s" % line) else: - self.file_parse_status = 'Decay' + self.file_parse_status = "Decay" particle = Particle.from_string(line.split()[0]) self.current_decay_top = AllowedDecays(particle) def do_CDecay(self, line): -## print 'CDecay for', line - if self.file_parse_status == 'Decay': - raise Exception('Cannot do CDecay in Decay block: %s' % line) + ## print 'CDecay for', line + if self.file_parse_status == "Decay": + raise Exception("Cannot do CDecay in Decay block: %s" % line) conj = Particle.from_string(line.split()[0]).invert() if conj not in self.decaylist: print(line) self.cdecay_delayed.append(line) return - raise Exception('CDecay without conjugate mode: %s' % line) + raise Exception("CDecay 
without conjugate mode: %s" % line) self.current_decay_top = AllowedDecays(line.split()[0]) -## print conj + ## print conj for cdecay in self.decaylist[conj].decays: -## print cdecay.daughters + ## print cdecay.daughters ndecay = Decay() ndecay.bf = cdecay.bf for dau in cdecay.daughters: -## print cdecay.daughters - ndecay.daughters[Particle.from_string(dau).invert()] = cdecay.daughters[dau] -## print ndecay.daughters + ## print cdecay.daughters + ndecay.daughters[Particle.from_string(dau).invert()] = cdecay.daughters[ + dau + ] + ## print ndecay.daughters -## print cdecay.daughters, ndecay.daughters -## print line + ## print cdecay.daughters, ndecay.daughters + ## print line self.current_decay_top.decays.append(ndecay) self.decaylist[self.current_decay_top.decay_of] = self.current_decay_top self.current_decay_top = None def do_End(self, line): -## return self.do_EOF(line) + ## return self.do_EOF(line) if self.cdecay_delayed != []: - print('Ought to be [] 0!:', self.cdecay_delayed, len(self.cdecay_delayed)) + print("Ought to be [] 0!:", self.cdecay_delayed, len(self.cdecay_delayed)) while self.cdecay_delayed: self.do_CDecay(self.cdecay_delayed.pop()) pass @@ -161,57 +204,58 @@ def addline(self, line): try: bf = float(spl[0]) except: - raise Exception('Cannot parse decay line: %s' % line) -## mod_found = False -## for x in known_models: -## if x in line: -## mod_found = True -## if not mod_found: -## print '\n', line - while not spl[-1][-1:] == ';': -## print 'fixin', line + raise Exception("Cannot parse decay line: %s" % line) + ## mod_found = False + ## for x in known_models: + ## if x in line: + ## mod_found = True + ## if not mod_found: + ## print '\n', line + while not spl[-1][-1:] == ";": + ## print 'fixin', line if self.use_rawinput: - line = ' '.join((line, input())) + line = " ".join((line, input())) else: - line = ' '.join((line, self.stdin.readline())) + line = " ".join((line, self.stdin.readline())) spl = line.split() -## print spl - killindex = None; i = 0 -## print spl + ## print spl + killindex = None + i = 0 + ## print spl while killindex == None and i < len(spl): - if spl[i][-1] == ';': spl[i] = spl[i][:-1] + if spl[i][-1] == ";": + spl[i] = spl[i][:-1] if spl[i] in known_models: killindex = i i += 1 if killindex == None: print(spl) - raise Exception('No decay model specified: %s' % line) -## print spl[killindex:] + raise Exception("No decay model specified: %s" % line) + ## print spl[killindex:] decay = Decay() decay.bf = bf -## print self.current_decay_top.decay_of, 'to', + ## print self.current_decay_top.decay_of, 'to', for part in spl[1:killindex]: -## print part, + ## print part, if part in decay.daughters: decay.daughters[part] += 1 else: decay.daughters[part] = 1 -## print decay.daughters + ## print decay.daughters self.current_decay_top.decays.append(decay) -## print self.current_decay_top.decays + + ## print self.current_decay_top.decays def do_Enddecay(self, line): - if self.file_parse_status != 'Decay': - raise Exception('Enddecay with no decay: %s' % line) + if self.file_parse_status != "Decay": + raise Exception("Enddecay with no decay: %s" % line) else: - self.file_parse_status = '' + self.file_parse_status = "" self.decaylist[self.current_decay_top.decay_of] = self.current_decay_top self.current_decay_top = None - - def do_hi(self, aft): - print('hi', aft) + print("hi", aft) class interactive(cmd.Cmd): @@ -219,55 +263,54 @@ def __init__(self): cmd.Cmd.__init__(self) self.decaylist = {} # These are particles we don't decay - self.termpart = ['pi0', 'K_S0'] + 
self.termpart = ["pi0", "K_S0"] def do_readfile(self, line): "Read a file in (defaults to the one built into this package)" - if line == '': + if line == "": fname = defdecfile else: fname = line - with open(fname, 'r') as infile: + with open(fname, "r") as infile: q = decparser(stdin=infile) q.use_rawinput = False q.cmdloop() self.decaylist = q.decaylist def do_dump(self, line): -## print line - if line == '': + ## print line + if line == "": lpart = self.decaylist else: lpart = line.split() for part in lpart: if part not in self.decaylist: - print('Unknown particle %s' % part) + print("Unknown particle %s" % part) else: - print('-------------', part, '-------------') + print("-------------", part, "-------------") for decay in self.decaylist[part].decays: - print(decay.bf, end=' ') - print(decay.daughters_to_string(), end=' ') + print(decay.bf, end=" ") + print(decay.daughters_to_string(), end=" ") print() - def do_termpart(self, line): - if line == '': + if line == "": print(self.termpart) elif len(line.split()) == 2: cmd = line.split()[0].lower() - if cmd not in ('add', 'del'): - print('Unknown command %s' % cmd) + if cmd not in ("add", "del"): + print("Unknown command %s" % cmd) else: part = line.split()[1] - if cmd == 'add' and part not in self.termpart: + if cmd == "add" and part not in self.termpart: self.termpart.append(part) - elif cmd == 'del': + elif cmd == "del": if part not in self.termpart: - print('%s not in list of terminating particles' % part) + print("%s not in list of terminating particles" % part) else: del self.termpart[self.termpart.index(part)] else: - print('Syntax: termpart [(add|del) particle]') + print("Syntax: termpart [(add|del) particle]") def do_exit(self, line): return self.do_EOF(line) @@ -276,34 +319,34 @@ def do_quit(self, line): return self.do_EOF(line) def do_final(self, line): -## predeclist = self.decaylist[line].decays[:] -## declist = [] -## for dec in predeclist: -## declist.append([[],dec]) -## decaytable = self.decaylist.copy() -## for j in self.termpart: -## del decaytable[j] -## last = [] -## while last != declist: -## last = declist[:] -## recurseOneLevel(declist, decaytable) + ## predeclist = self.decaylist[line].decays[:] + ## declist = [] + ## for dec in predeclist: + ## declist.append([[],dec]) + ## decaytable = self.decaylist.copy() + ## for j in self.termpart: + ## del decaytable[j] + ## last = [] + ## while last != declist: + ## last = declist[:] + ## recurseOneLevel(declist, decaytable) declist = self.getDecList(line) declist = compactDecayList(declist) declist.sort(key=lambda x: x.bf, reverse=True) for dec in declist: -## print dec + ## print dec print(dec.bf, dec.daughters_to_string()) def getDecList(self, line): predeclist = self.decaylist[line].decays[:] declist = [] for dec in predeclist: - declist.append([[(line, dec)],dec]) -## print dec -## declist.append([[],dec]) -## dlist = DaughterList() -## dlist.add(line) -## declist = [[[],Decay(1,dlist)]] + declist.append([[(line, dec)], dec]) + ## print dec + ## declist.append([[],dec]) + ## dlist = DaughterList() + ## dlist.add(line) + ## declist = [[[],Decay(1,dlist)]] decaytable = self.decaylist.copy() for j in self.termpart: del decaytable[j] @@ -313,37 +356,35 @@ def getDecList(self, line): recurseOneLevel(declist, decaytable) return declist - def do_explain(self, line): - part = input('Parent particle? ') + part = input("Parent particle? ") if part not in self.decaylist: - print('%s not known. Have you read the file yet?' % part) + print("%s not known. 
Have you read the file yet?" % part) return - final = input('Final state? ').split() + final = input("Final state? ").split() finalhash = {} termpartcpy = self.termpart[:] for p in final: - finalhash[p] = finalhash.get(p,0) + 1 + finalhash[p] = finalhash.get(p, 0) + 1 if p in self.decaylist and p not in self.termpart: self.termpart.append(p) declist = self.getDecList(part) self.termpart[:] = termpartcpy sublist = [] for dec in declist: -## print dec[1].daughters + ## print dec[1].daughters if dec[1].daughters == finalhash: sublist.append(dec) sublist.sort(key=lambda x: x[1].bf, reverse=True) for entry in sublist: - print(entry[1].bf, end=' ') + print(entry[1].bf, end=" ") if len(entry[0]) == 0: - print('Direct ') + print("Direct ") else: - print('Chain: ') + print("Chain: ") for e2 in entry[0]: -## print e2 - print('\t', e2[0], '->', e2[1].daughters_to_string()) - + ## print e2 + print("\t", e2[0], "->", e2[1].daughters_to_string()) def do_oneshot(self, line): declist = self.decaylist[line].decays[:] @@ -355,6 +396,7 @@ def do_EOF(self, line): print() return True + def recurseOneLevel(decaylist, decaytable): """Do one sweep at replacing particles with daughters.""" """decaylist should be tuple of [[(particle, decay) ...], finalstate]""" @@ -378,12 +420,14 @@ def recurseOneLevel(decaylist, decaytable): del dlist[toexpand] for entry in subdec.daughters: dlist[entry] = dlist.get(entry, 0) + subdec.daughters[entry] - newdecay = Decay(decay[1].bf*subdec.bf, dlist) + newdecay = Decay(decay[1].bf * subdec.bf, dlist) rv.append([chain, newdecay]) decaylist[:] = rv + def compactDecayList(decaylist): - rv = []; used_daughter_list = [] + rv = [] + used_daughter_list = [] for decay in decaylist: if decay[1].daughters not in used_daughter_list: rv.append(Decay(decay[1].bf, decay[1].daughters)) @@ -395,15 +439,15 @@ def compactDecayList(decaylist): return rv -if __name__ == '__main__': -## decparser().cmdloop( -## """Hi! We are beginning the loop.""" -## ) -## q = decparser(stdin=file('/home/ponyisi/DECAY.DEC', 'r'), stdout=sys.stdout) -## q.use_rawinput = False -## q.cmdloop() -## print q.decaylist +if __name__ == "__main__": + ## decparser().cmdloop( + ## """Hi! We are beginning the loop.""" + ## ) + ## q = decparser(stdin=file('/home/ponyisi/DECAY.DEC', 'r'), stdout=sys.stdout) + ## q.use_rawinput = False + ## q.cmdloop() + ## print q.decaylist interactive().cmdloop( "Hi! Welcome to the DECAY.DEC parser program.\n" "Blame ponyisi@lepp if any problems arise." - ) + ) diff --git a/decaylanguage/dec/enums.py b/decaylanguage/dec/enums.py index 47e8fed7..b6bd14ad 100644 --- a/decaylanguage/dec/enums.py +++ b/decaylanguage/dec/enums.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. 
# # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -68,5 +69,5 @@ class PhotosEnum(IntEnum): "VVPIPI", "VVP", "VVS_PWAVE", - "YMSTOYNSPIPICLEO" - ) + "YMSTOYNSPIPICLEO", +) diff --git a/decaylanguage/decay/__init__.py b/decaylanguage/decay/__init__.py index 86aa9d6c..66062ef1 100644 --- a/decaylanguage/decay/__init__.py +++ b/decaylanguage/decay/__init__.py @@ -1,4 +1,4 @@ - +# -*- coding: utf-8 -*- from .decay import DaughtersDict, DecayMode, DecayChain from .viewer import DecayChainViewer diff --git a/decaylanguage/decay/decay.py b/decaylanguage/decay/decay.py index 0980ea54..41cff375 100644 --- a/decaylanguage/decay/decay.py +++ b/decaylanguage/decay/decay.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -63,7 +64,7 @@ def to_string(self): """ Return the daughters as a string representation (ordered list of names). """ - return ' '.join(sorted(self.elements())) + return " ".join(sorted(self.elements())) def to_list(self): """ @@ -95,11 +96,14 @@ def charge_conjugate(self, pdg_name=False): >>> dd.charge_conjugate(pdg_name=True) """ - return self.__class__({charge_conjugate_name(p, pdg_name):n for p, n in self.items()}) + return self.__class__( + {charge_conjugate_name(p, pdg_name): n for p, n in self.items()} + ) def __repr__(self): return "<{self.__class__.__name__}: {daughters}>".format( - self=self, daughters=self.to_list()) + self=self, daughters=self.to_list() + ) def __str__(self): return repr(self) @@ -143,11 +147,9 @@ class DecayMode(object): with any kind of particle names (basically an iterable of strings). """ - __slots__ = ("bf", - "daughters", - "metadata") + __slots__ = ("bf", "daughters", "metadata") - def __init__(self, bf = 0, daughters = None, **info): + def __init__(self, bf=0, daughters=None, **info): """ Default constructor. @@ -195,7 +197,7 @@ def __init__(self, bf = 0, daughters = None, **info): self.bf = bf self.daughters = DaughtersDict(daughters) - self.metadata = dict(model='', model_params='') + self.metadata = dict(model="", model_params="") self.metadata.update(**info) @classmethod @@ -223,8 +225,8 @@ def from_dict(cls, decay_mode_dict): dm = deepcopy(decay_mode_dict) try: - bf = dm.pop('bf') - daughters = dm.pop('fs') + bf = dm.pop("bf") + daughters = dm.pop("fs") except: raise RuntimeError("Input not in the expected format!") @@ -252,9 +254,10 @@ def from_pdgids(cls, bf, daughters, **info): try: from particle import PDGID, ParticleNotFound from particle.converters import EvtGenName2PDGIDBiMap + daughters = [EvtGenName2PDGIDBiMap[PDGID(d)] for d in daughters] except ParticleNotFound: - raise ParticleNotFound('Please check your input PDG IDs!') + raise ParticleNotFound("Please check your input PDG IDs!") daughters = DaughtersDict(daughters) @@ -266,14 +269,16 @@ def describe(self): Make a nice high-density string for all decay-mode properties and info. 
""" val = """Daughters: {daughters} , BF: {bf:<15.8g} - Decay model: {model} {model_params}""".format(daughters=' '.join(self.daughters), - bf=self.bf, - model=self.metadata['model'], - model_params=self.metadata['model_params'] - if self.metadata['model_params'] is not None else '') - - keys = [k for k in self.metadata - if k not in ('model', 'model_params')] + Decay model: {model} {model_params}""".format( + daughters=" ".join(self.daughters), + bf=self.bf, + model=self.metadata["model"], + model_params=self.metadata["model_params"] + if self.metadata["model_params"] is not None + else "", + ) + + keys = [k for k in self.metadata if k not in ("model", "model_params")] if len(keys) > 0: val += "\n Extra info:\n" for key in keys: @@ -298,10 +303,10 @@ def to_dict(self): 'study': 'toy', 'year': 2019} """ - d = {'bf': self.bf, 'fs': self.daughters.to_list()} + d = {"bf": self.bf, "fs": self.daughters.to_list()} d.update(self.metadata) - if d['model_params'] is None: - d['model_params'] = '' + if d["model_params"] is None: + d["model_params"] = "" return d def charge_conjugate(self, pdg_name=False): @@ -328,9 +333,9 @@ def charge_conjugate(self, pdg_name=False): >>> dm.charge_conjugate(pdg_name=True) """ - return self.__class__(self.bf, - self.daughters.charge_conjugate(pdg_name), - **self.metadata) + return self.__class__( + self.bf, self.daughters.charge_conjugate(pdg_name), **self.metadata + ) def __len__(self): """ @@ -340,9 +345,10 @@ def __len__(self): def __repr__(self): return "<{self.__class__.__name__}: daughters={daughters}, BF={bf}>".format( - self=self, - daughters=self.daughters.to_string() if len(self.daughters)>0 else '[]', - bf=self.bf) + self=self, + daughters=self.daughters.to_string() if len(self.daughters) > 0 else "[]", + bf=self.bf, + ) def __str__(self): return repr(self) @@ -366,8 +372,7 @@ class DecayChain(object): unless there is a good motivation not to. """ - __slots__ = ("mother", - "decays") + __slots__ = ("mother", "decays") def __init__(self, mother, decays): """ @@ -407,27 +412,26 @@ def build_decay_modes(dc_dict): dms = dc_dict[mother] for dm in dms: - if has_no_subdecay(dm['fs']): + if has_no_subdecay(dm["fs"]): decay_modes[mother] = DecayMode.from_dict(dm) else: d = deepcopy(dm) - for i in range(len(d['fs'])): - if isinstance(d['fs'][i], dict): + for i in range(len(d["fs"])): + if isinstance(d["fs"][i], dict): # Replace the element with the key and # store the present decay mode ignoring sub-decays - d['fs'][i] = list(d['fs'][i].keys())[0] + d["fs"][i] = list(d["fs"][i].keys())[0] # Recursively continue ... 
- build_decay_modes(dm['fs'][i]) + build_decay_modes(dm["fs"][i]) # Create the decay mode now that none of its particles # has a sub-decay decay_modes[mother] = DecayMode.from_dict(d) - decay_modes = dict() mother = list(decay_chain_dict.keys())[0] build_decay_modes(decay_chain_dict) - return cls( mother, decay_modes) + return cls(mother, decay_modes) def top_level_decay(self): """ @@ -507,29 +511,34 @@ def print_as_tree(self): +--> pi+ """ indent = 4 - arrow = '+--> ' - bar = '|' + arrow = "+--> " + bar = "|" # TODO: simplify logic and perform further checks def _print(decay_dict, depth=0, link=False, last=False): mother = list(decay_dict.keys())[0] - prefix = bar if (link and depth>1) else '' - prefix = prefix + ' '*indent*(depth-1) + prefix = bar if (link and depth > 1) else "" + prefix = prefix + " " * indent * (depth - 1) for i, i_decay in enumerate(decay_dict[mother]): - print(prefix, arrow if depth > 0 else '', mother, sep='') - fsps = i_decay['fs'] + print(prefix, arrow if depth > 0 else "", mother, sep="") + fsps = i_decay["fs"] n = len(list(fsps)) depth += 1 for j, fsp in enumerate(fsps): - prefix = bar if (link and depth>1) else '' + prefix = bar if (link and depth > 1) else "" if last: - prefix = prefix + ' '*indent*(depth-1) + ' ' + prefix = prefix + " " * indent * (depth - 1) + " " else: - prefix = (prefix+' '*indent)*(depth-1) + prefix = (prefix + " " * indent) * (depth - 1) if isinstance(fsp, str): - print(prefix, arrow, fsp, sep='') + print(prefix, arrow, fsp, sep="") else: - _print(fsp, depth=depth, link=(link or (j>> dc.flatten(stable_particles=['K_S0', 'pi0']).decays {'D0': } """ - vis_bf = 1. + vis_bf = 1.0 fs = DaughtersDict() keys = self.decays.keys() for k in keys: @@ -614,23 +624,22 @@ def flatten(self, stable_particles=[]): fs[k] -= 1 down_one_level = k in fs.elements() - return DecayChain(self.mother, - {self.mother:DecayMode(vis_bf, - fs, - **self.top_level_decay().metadata) - } - ) + return DecayChain( + self.mother, + {self.mother: DecayMode(vis_bf, fs, **self.top_level_decay().metadata)}, + ) def __repr__(self): if self.mother is None: return "Decay mode: undefined" return "<{self.__class__.__name__}: {mother} -> {tldecay} ({n} sub-decays), BF={bf}>".format( - self=self, - mother=self.mother, - tldecay=self.top_level_decay().daughters.to_string(), - n=len(self.decays)-1, - bf=self.bf) + self=self, + mother=self.mother, + tldecay=self.top_level_decay().daughters.to_string(), + n=len(self.decays) - 1, + bf=self.bf, + ) def __str__(self): return repr(self) diff --git a/decaylanguage/decay/viewer.py b/decaylanguage/decay/viewer.py index 6f68320a..d3f6a8f1 100644 --- a/decaylanguage/decay/viewer.py +++ b/decaylanguage/decay/viewer.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -10,6 +11,7 @@ """ import itertools + try: counter = itertools.count().__next__ except AttributeError: @@ -18,7 +20,9 @@ try: import pydot except ImportError: - raise ImportError("You need pydot for this submodule. Please install pydot with for example 'pip install pydot'\n") + raise ImportError( + "You need pydot for this submodule. 
Please install pydot with for example 'pip install pydot'\n" + ) from particle import latex_to_html_name from particle.converters.bimap import DirectionalMaps @@ -44,9 +48,7 @@ class DecayChainViewer(object): >>> dcv # display the SVG figure in a notebook """ - __slots__ = ("_chain", - "_graph", - "_graph_attributes") + __slots__ = ("_chain", "_graph", "_graph_attributes") def __init__(self, decaychain, **attrs): """ @@ -77,6 +79,7 @@ def _build_decay_graph(self): Recursively navigate the decay chain tree and produce a Graph in the DOT language. """ + def safe_html_name(name): """ Get a safe HTML name from the EvtGen name. @@ -95,77 +98,105 @@ def safe_html_name(name): except: return name - def html_table_label(names, add_tags=False, bgcolor='#9abad6'): + def html_table_label(names, add_tags=False, bgcolor="#9abad6"): if add_tags: - label = '<'.format(bgcolor=bgcolor) + label = ( + '<
'.format( + bgcolor=bgcolor + ) + ) else: - label = '<
'.format(bgcolor=bgcolor) + label = '<
'.format( + bgcolor=bgcolor + ) for i, n in enumerate(names): if add_tags: - label += ''.format(tag=i, name=safe_html_name(n)) + label += ''.format( + tag=i, name=safe_html_name(n) + ) else: - label += ''.format(name=safe_html_name(n)) - label += "{tr}
{name}
{name}
{name}
>".format(tr='' if add_tags else '') + label += '{name}'.format( + name=safe_html_name(n) + ) + label += "{tr}>".format(tr="" if add_tags else "") return label def new_node_no_subchain(list_parts): - label = html_table_label(list_parts, bgcolor='#eef3f8') - r = 'dec%s' % counter() - self._graph.add_node(pydot.Node(r, label=label, style='filled', fillcolor='#eef3f8')) + label = html_table_label(list_parts, bgcolor="#eef3f8") + r = "dec%s" % counter() + self._graph.add_node( + pydot.Node(r, label=label, style="filled", fillcolor="#eef3f8") + ) return r def new_node_with_subchain(list_parts): - list_parts = [list(p.keys())[0] if isinstance(p,dict) else p for p in list_parts] + list_parts = [ + list(p.keys())[0] if isinstance(p, dict) else p for p in list_parts + ] label = html_table_label(list_parts, add_tags=True) - r = 'dec%s' % counter() - self._graph.add_node(pydot.Node(r, shape='none', label=label)) + r = "dec%s" % counter() + self._graph.add_node(pydot.Node(r, shape="none", label=label)) return r def iterate_chain(subchain, top_node=None, link_pos=None): - if not top_node: top_node = node_mother + if not top_node: + top_node = node_mother n_decaymodes = len(subchain) for idm in range(n_decaymodes): - _list_parts = subchain[idm]['fs'] + _list_parts = subchain[idm]["fs"] if not has_subdecay(_list_parts): _ref = new_node_no_subchain(_list_parts) - _bf = subchain[idm]['bf'] + _bf = subchain[idm]["bf"] if link_pos is None: self._graph.add_edge(pydot.Edge(top_node, _ref, label=str(_bf))) else: - self._graph.add_edge(pydot.Edge('%s:p%s'%(top_node, link_pos), _ref, label=str(_bf))) + self._graph.add_edge( + pydot.Edge( + "%s:p%s" % (top_node, link_pos), _ref, label=str(_bf) + ) + ) else: _ref_1 = new_node_with_subchain(_list_parts) - _bf_1 = subchain[idm]['bf'] + _bf_1 = subchain[idm]["bf"] if link_pos is None: - self._graph.add_edge(pydot.Edge(top_node, _ref_1, label=str(_bf_1))) + self._graph.add_edge( + pydot.Edge(top_node, _ref_1, label=str(_bf_1)) + ) else: - self._graph.add_edge(pydot.Edge('%s:p%s'%(top_node, link_pos), _ref_1, label=str(_bf_1))) + self._graph.add_edge( + pydot.Edge( + "%s:p%s" % (top_node, link_pos), + _ref_1, + label=str(_bf_1), + ) + ) for i, _p in enumerate(_list_parts): - if not isinstance(_p,str): + if not isinstance(_p, str): _k = list(_p.keys())[0] iterate_chain(_p[_k], top_node=_ref_1, link_pos=i) - has_subdecay = lambda ds: not all([isinstance(p,str) for p in ds]) + has_subdecay = lambda ds: not all([isinstance(p, str) for p in ds]) k = list(self._chain.keys())[0] - label = html_table_label([k], add_tags=True, bgcolor='#568dba') - node_mother = pydot.Node("mother", shape='none', label=label) + label = html_table_label([k], add_tags=True, bgcolor="#568dba") + node_mother = pydot.Node("mother", shape="none", label=label) self._graph.add_node(node_mother) sc = self._chain[k] # Actually build the whole decay chain, iteratively iterate_chain(sc) - def visualize_decay_graph(self, format='png'): + def visualize_decay_graph(self, format="png"): """ Visualize the Graph produced, opening the file ('png' by default) with the machine default program. 
""" import tempfile import webbrowser - tmpf = tempfile.NamedTemporaryFile(prefix='DecayChainViewer', - suffix='.{0}'.format(format), - delete=False) + + tmpf = tempfile.NamedTemporaryFile( + prefix="DecayChainViewer", suffix=".{0}".format(format), delete=False + ) self.graph.write(tmpf.name, format=format) tmpf.close() return webbrowser.open(tmpf.name) @@ -193,7 +224,7 @@ def _instantiate_graph(self, **attrs): """ # Make sure the user cannot override the graph type try: - del attrs['graph_type'] + del attrs["graph_type"] except KeyError: pass @@ -211,18 +242,17 @@ def _get_graph_defaults(self): """ Note: the graph type cannot be overriden. """ - return dict(graph_type='digraph', graph_name='DecayChainGraph', - rankdir='LR') + return dict(graph_type="digraph", graph_name="DecayChainGraph", rankdir="LR") def _get_node_defaults(self): - return dict(fontname='Helvetica', fontsize=11, shape='oval') + return dict(fontname="Helvetica", fontsize=11, shape="oval") def _get_edge_defaults(self): - return dict(fontcolor='#4c4c4c', fontsize=11) + return dict(fontcolor="#4c4c4c", fontsize=11) def _repr_svg_(self): """ IPython display in SVG format. """ - return self.graph.create_svg().decode('UTF-8') + return self.graph.create_svg().decode("UTF-8") diff --git a/decaylanguage/modeling/__init__.py b/decaylanguage/modeling/__init__.py index a2c1b03f..e2070f41 100644 --- a/decaylanguage/modeling/__init__.py +++ b/decaylanguage/modeling/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .amplitudechain import LS from .amplitudechain import AmplitudeChain diff --git a/decaylanguage/modeling/ampgen2goofit.py b/decaylanguage/modeling/ampgen2goofit.py index 621f331c..0aa395c0 100644 --- a/decaylanguage/modeling/ampgen2goofit.py +++ b/decaylanguage/modeling/ampgen2goofit.py @@ -1,14 +1,14 @@ #!/usr/bin/env python -# coding: utf-8 +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE # or https://github.com/scikit-hep/decaylanguage for details. -''' +""" This is a function that takes a filename and either prints out or returns a string output with the converted set of decay chains and variables. 
-''' +""" from __future__ import absolute_import from __future__ import division @@ -35,52 +35,58 @@ def ampgen2goofit(filename, ret_output=False): lines, all_states = GooFitChain.read_ampgen(str(filename)) - printer(r'/* Autogenerated file by AmpGen2GooFit') - printer('Generated on ', datetime.datetime.now()) + printer(r"/* Autogenerated file by AmpGen2GooFit") + printer("Generated on ", datetime.datetime.now()) - printer('\n') + printer("\n") for seen_factor in {p.spindetails() for p in lines}: my_lines = [p for p in lines if p.spindetails() == seen_factor] printer(colors.bold | seen_factor, ":", *my_lines[0].spinfactors) for line in my_lines: - printer(' ', colors.blue | str(line)) + printer(" ", colors.blue | str(line)) - printer('\n') + printer("\n") for spintype in SpinType: - ps = [format(str(p), '11') - for p in sorted(GooFitChain.all_particles) if p.spin_type == spintype] + ps = [ + format(str(p), "11") + for p in sorted(GooFitChain.all_particles) + if p.spin_type == spintype + ] printer("{spintype.name:>12}:".format(spintype=spintype), *ps) - printer('\n') + printer("\n") for n, line in enumerate(lines): - printer('{n:2} {line!s:<70} spinfactors: {lensf} L: {line.L} [{Lr[0]}-{Lr[1]}]' - .format(n=n, line=line, lensf=len(line.spinfactors), Lr=line.L_range())) + printer( + "{n:2} {line!s:<70} spinfactors: {lensf} L: {line.L} [{Lr[0]}-{Lr[1]}]".format( + n=n, line=line, lensf=len(line.spinfactors), Lr=line.L_range() + ) + ) # We can make the GooFit Intro code: - printer(colors.bold & colors.green | '\n\nAll discovered spin configurations:') + printer(colors.bold & colors.green | "\n\nAll discovered spin configurations:") for line in sorted({line.spindetails() for line in lines}): printer(colors.green | line) - printer(colors.bold & colors.blue | '\n\nAll known spin configurations:') + printer(colors.bold & colors.blue | "\n\nAll known spin configurations:") # TODO: 4 body only for e in SF_4Body: printer(colors.blue | e.name) - printer('\n*/\n\n // Intro') + printer("\n*/\n\n // Intro") printer(GooFitChain.make_intro(all_states)) - printer('\n\n // Parameters') + printer("\n\n // Parameters") printer(GooFitChain.make_pars()) # And the lines can be turned into code, as well: - printer('\n\n // Lines') + printer("\n\n // Lines") for n, line in enumerate(lines): - printer(' // Line', n) - printer(line.to_goofit(all_states[1:]), end='\n\n\n') + printer(" // Line", n) + printer(line.to_goofit(all_states[1:]), end="\n\n\n") - if(ret_output): + if ret_output: return output.getvalue() diff --git a/decaylanguage/modeling/ampgentransform.py b/decaylanguage/modeling/ampgentransform.py index c6e39e3d..b58ea92d 100644 --- a/decaylanguage/modeling/ampgentransform.py +++ b/decaylanguage/modeling/ampgentransform.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. 
# # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -5,45 +6,52 @@ from lark import Transformer, Tree + def get_from_parser(parser, key): return [v.children for v in parser.find_data(key)] + class AmpGenTransformer(Transformer): def constant(self, lines): particle, value = lines - return Tree('constant', [str(particle.children[0]), float(value)]) + return Tree("constant", [str(particle.children[0]), float(value)]) + def event_type(self, lines): - return Tree('event_type', [str(p.children[0]) for p in lines]) + return Tree("event_type", [str(p.children[0]) for p in lines]) + def fixed(self, lines): return False + def free(self, lines): return True + def variable(self, lines): p, free, value, error = lines - return Tree('variable', [str(p.children[0]), free, float(value), float(error)]) + return Tree("variable", [str(p.children[0]), free, float(value), float(error)]) + def cplx_decay_line(self, lines): decay, real, imag = lines real_free, real_val, real_err = real.children imag_free, imag_val, imag_err = imag.children - decay['fix'] = not (real_free and imag_free) - decay['amp'] = complex(float(real_val), float(imag_val)) - decay['err'] = complex(float(real_err), float(imag_err)) + decay["fix"] = not (real_free and imag_free) + decay["amp"] = complex(float(real_val), float(imag_val)) + decay["err"] = complex(float(real_err), float(imag_err)) - return Tree('cplx_decay_line', decay) + return Tree("cplx_decay_line", decay) def decay(self, lines): - particle, = lines[0].children - dic = {'name' : str(particle), 'daughters' : []} + (particle,) = lines[0].children + dic = {"name": str(particle), "daughters": []} for line in lines[1:]: - if line.data == 'subdecay': - dic['daughters'] += line.children - elif line.data == 'decaytype': + if line.data == "subdecay": + dic["daughters"] += line.children + elif line.data == "decaytype": for children in line.children: - if children.data == 'spinfactor': - dic['spinfactor'], = children.children - elif children.data == 'lineshape': - dic['lineshape'], = children.children + if children.data == "spinfactor": + (dic["spinfactor"],) = children.children + elif children.data == "lineshape": + (dic["lineshape"],) = children.children return dic diff --git a/decaylanguage/modeling/amplitudechain.py b/decaylanguage/modeling/amplitudechain.py index 2eab8a89..f219c911 100644 --- a/decaylanguage/modeling/amplitudechain.py +++ b/decaylanguage/modeling/amplitudechain.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE # or https://github.com/scikit-hep/decaylanguage for details. -''' +""" A class representing a set of decays. Can be subclassed to provide custom converters. 
-''' +""" from __future__ import absolute_import from __future__ import division @@ -37,8 +38,9 @@ from .ampgentransform import AmpGenTransformer, get_from_parser + class LS(Enum): - 'Line shapes supported (currently)' + "Line shapes supported (currently)" RBW = 0 GSpline = 1 kMatrix = 2 @@ -51,12 +53,11 @@ class AmplitudeChain(ModelDecay): lineshape = attr.ib(None) spinfactor = attr.ib(None) - amp = attr.ib(1+0j, cmp=False, validator=attr.validators.instance_of(complex)) - err = attr.ib(0+0j, cmp=False, validator=attr.validators.instance_of(complex)) + amp = attr.ib(1 + 0j, cmp=False, validator=attr.validators.instance_of(complex)) + err = attr.ib(0 + 0j, cmp=False, validator=attr.validators.instance_of(complex)) fix = attr.ib(True, cmp=False, validator=attr.validators.instance_of(bool)) name = attr.ib(None) - # Class members keep track of additions all_particles = set() final_particles = set() @@ -66,55 +67,62 @@ class AmplitudeChain(ModelDecay): @classmethod def from_matched_line(cls, mat): - ''' + """ This operates on an already-matched line. :param mat: The groupdict output of a match :return: A new amplitude chain instance - ''' + """ - getall = 'all' if hasattr(Particle, 'all') else 'table' # Support 0.4.4 + getall = "all" if hasattr(Particle, "all") else "table" # Support 0.4.4 # Check to see if new particles loaded; if not, load them. if 998100 not in getattr(Particle, getall)(): data_dir = os.path.dirname(os.path.realpath(__file__)) - special_filename = os.path.join(data_dir, '..', 'data', 'MintDalitzSpecialParticles.csv') + special_filename = os.path.join( + data_dir, "..", "data", "MintDalitzSpecialParticles.csv" + ) Particle.load_table(special_filename, append=True) try: - mat['particle'] = Particle.from_string(mat['name']) + mat["particle"] = Particle.from_string(mat["name"]) except: - print("Failed to find particle", mat['name'], "with parsed dictionary:", mat, file=sys.stderr) + print( + "Failed to find particle", + mat["name"], + "with parsed dictionary:", + mat, + file=sys.stderr, + ) raise + if mat["particle"] not in cls.all_particles: + cls.all_particles |= {mat["particle"]} - if mat['particle'] not in cls.all_particles: - cls.all_particles |= {mat['particle']} - - if mat['daughters']: - mat['daughters'] = [cls.from_matched_line(d) for d in mat['daughters']] + if mat["daughters"]: + mat["daughters"] = [cls.from_matched_line(d) for d in mat["daughters"]] # if master line only - if 'amp' in mat and not cls.cartesian: - A = mat['amp'].real - dA = mat['err'].real - theta = mat['amp'].imag - dtheta = mat['err'].imag - - mat['amp'] = A * np.exp(theta*1j) + if "amp" in mat and not cls.cartesian: + A = mat["amp"].real + dA = mat["err"].real + theta = mat["amp"].imag + dtheta = mat["err"].imag - mat['err'] = ((dA*np.cos(theta) + A*np.sin(dtheta)) - + (dA*np.sin(theta) + A*np.cos(dtheta))*1j) + mat["amp"] = A * np.exp(theta * 1j) + mat["err"] = (dA * np.cos(theta) + A * np.sin(dtheta)) + ( + dA * np.sin(theta) + A * np.cos(dtheta) + ) * 1j return cls(**mat) def expand_lines(self, linelist): - ''' + """ Take a DecayTree -> list of DecayTrees with each dead-end daughter expanded to every possible combination. 
(recursive) - ''' + """ # If the Tree has daugthers, run on those if self.daughters: @@ -127,8 +135,12 @@ def expand_lines(self, linelist): return retlist # If the tree ends - new_trees = [l for line in linelist if line.name == - self.name for l in line.expand_lines(linelist)] + new_trees = [ + l + for line in linelist + if line.name == self.name + for l in line.expand_lines(linelist) + ] if new_trees: return new_trees else: @@ -139,11 +151,11 @@ def expand_lines(self, linelist): def ls_enum(self): if not self.lineshape: return LS.RBW - elif 'GSpline.EFF' == self.lineshape: + elif "GSpline.EFF" == self.lineshape: return LS.GSpline - elif self.lineshape.startswith('kMatrix'): + elif self.lineshape.startswith("kMatrix"): return LS.kMatrix - elif self.lineshape.startswith('FOCUS'): + elif self.lineshape.startswith("FOCUS"): return LS.FOCUS else: raise RuntimeError("Unimplemented lineshape {0}".format(self.lineshape)) @@ -155,14 +167,13 @@ def full_amp(self): amp *= d.full_amp return amp - def L_range(self, conserveParity=False): S = self.particle.J s1 = self[0].particle.J s2 = self[1].particle.J - min_spin = abs(S-s1-s2) - min_spin = min(abs(S+s1-s2), min_spin) - min_spin = min(abs(S-s1+s2), min_spin) + min_spin = abs(S - s1 - s2) + min_spin = min(abs(S + s1 - s2), min_spin) + min_spin = min(abs(S - s1 + s2), min_spin) max_spin = S + s1 + s2 if not conserveParity: return (min_spin, max_spin) @@ -172,47 +183,50 @@ def L_range(self, conserveParity=False): @property def L(self): if self.spinfactor: - return 'S P D F'.split().index(self.spinfactor) + return "S P D F".split().index(self.spinfactor) min_L, _ = self.L_range() return min_L # Ground state unless specified - def _get_html(self): name = self.particle.html_name - name = re.sub(r'(.*)', u'\\1\u0305', name) + name = re.sub( + r'(.*)', u"\\1\u0305", name + ) if self.spinfactor or self.lineshape: - name += '
' + name += "
" if self.spinfactor: - name += '[' + self.spinfactor + ']' + name += '[' + self.spinfactor + "]" if self.lineshape: - name += '[' + self.lineshape + ']' + name += '[' + self.lineshape + "]" return name def __str__(self): name = str(self.particle) if self.lineshape and self.spinfactor: - name += '[' + self.spinfactor + ';' + self.lineshape + ']' + name += "[" + self.spinfactor + ";" + self.lineshape + "]" elif self.lineshape: - name += '[' + self.lineshape + ']' + name += "[" + self.lineshape + "]" elif self.spinfactor: - name += '[' + self.spinfactor + ']' + name += "[" + self.spinfactor + "]" if self.daughters: - name += '{'+','.join(map(str, self.daughters))+'}' + name += "{" + ",".join(map(str, self.daughters)) + "}" return name @classmethod - def read_ampgen(cls, filename=None, text=None, grammar=None, parser='lalr', **kargs): - ''' + def read_ampgen( + cls, filename=None, text=None, grammar=None, parser="lalr", **kargs + ): + """ Read in an ampgen file :param filename: Filename to read :param text: Text to read (use instead of filename) :return: array of AmplitudeChains, parameters, constants, event type - ''' + """ if grammar is None: - grammar = open_text(data, 'ampgen.lark') + grammar = open_text(data, "ampgen.lark") # Read the file in, ignore empty lines and comments if filename is not None: @@ -224,13 +238,13 @@ def read_ampgen(cls, filename=None, text=None, grammar=None, parser='lalr', **ka lark = Lark(grammar, parser=parser, transformer=AmpGenTransformer(), **kargs) parsed = lark.parse(text) - event_type, = get_from_parser(parsed, 'event_type') + (event_type,) = get_from_parser(parsed, "event_type") - invert_lines = get_from_parser(parsed, 'invert_line') - cplx_decay_lines = get_from_parser(parsed, 'cplx_decay_line') - cart_decay_lines = get_from_parser(parsed, 'cart_decay_line') - variables = get_from_parser(parsed, 'variable') - constants = get_from_parser(parsed, 'constant') + invert_lines = get_from_parser(parsed, "invert_line") + cplx_decay_lines = get_from_parser(parsed, "cplx_decay_line") + cart_decay_lines = get_from_parser(parsed, "cart_decay_line") + variables = get_from_parser(parsed, "variable") + constants = get_from_parser(parsed, "constant") try: all_states = [Particle.from_string(n) for n in event_type] @@ -238,13 +252,12 @@ def read_ampgen(cls, filename=None, text=None, grammar=None, parser='lalr', **ka print("Did not find at least one of the state particles from", *event_type) raise - fcs = get_from_parser(parsed, 'fast_coherent_sum') + fcs = get_from_parser(parsed, "fast_coherent_sum") if fcs: - fcs, = fcs - fcs, = fcs.children + (fcs,) = fcs + (fcs,) = fcs.children cls.cartesian = bool(fcs) - # TODO: re-enable this # Combine dual line Cartesian lines into traditional cartesian lines # for a, b in combinations(cart_decay_lines, 2): @@ -260,18 +273,24 @@ def read_ampgen(cls, filename=None, text=None, grammar=None, parser='lalr', **ka # real_lines.append(ampline.dual.match(new_string).groupdict()) # Make the partial lines and constants as dataframes - parameters = pd.DataFrame(variables, - columns='name fix value error'.split()).set_index('name') + parameters = pd.DataFrame( + variables, columns="name fix value error".split() + ).set_index("name") - constants = pd.DataFrame(constants, - columns='name value'.split()).set_index('name') + constants = pd.DataFrame(constants, columns="name value".split()).set_index( + "name" + ) # Convert the matches into AmplitudeChains line_arr = [cls.from_matched_line(c) for c in cplx_decay_lines] # Expand partial lines into 
complete lines - new_line_arr = [l for line in line_arr if line.particle == all_states[0] - for l in line.expand_lines(line_arr)] + new_line_arr = [ + l + for line in line_arr + if line.particle == all_states[0] + for l in line.expand_lines(line_arr) + ] # Return return new_line_arr, parameters, constants, all_states diff --git a/decaylanguage/modeling/decay.py b/decaylanguage/modeling/decay.py index 912698fd..ca36929b 100644 --- a/decaylanguage/modeling/decay.py +++ b/decaylanguage/modeling/decay.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE # or https://github.com/scikit-hep/decaylanguage for details. -''' +""" A general base class representing decays. -''' +""" from __future__ import absolute_import @@ -27,12 +28,13 @@ graphviz = None warnings.warn("Graphviz is not installed. Line display not available.") + @attr.s(slots=True) class ModelDecay(object): - ''' + """ This describes a decay very generally, with search and print features. Subclassed for futher usage. - ''' + """ particle = attr.ib() daughters = attr.ib([], converter=lambda x: x if x else []) @@ -48,7 +50,9 @@ def is_vertex(self): def is_strong(self): if not self.is_vertex(): return None - return set(self.particle.quarks) == set(self[0].particle.quarks).union(set(self[1].particle.quarks)) + return set(self.particle.quarks) == set(self[0].particle.quarks).union( + set(self[1].particle.quarks) + ) def __len__(self): return len(self.daughters) @@ -57,9 +61,9 @@ def __getitem__(self, item): return self.daughters[item] def _get_html(self): - ''' + """ Get the html dot representation of this node only. Override in subclasses. - ''' + """ return self.particle.html_name def _add_nodes(self, drawing): @@ -69,7 +73,6 @@ def _add_nodes(self, drawing): drawing.edge(str(id(self)), str(id(p))) p._add_nodes(drawing) - @property def vertexes(self): verts = [] @@ -81,41 +84,46 @@ def vertexes(self): @property def structure(self): - ''' + """ The structure of the decay chain, simplified to only final state particles - ''' + """ if self.daughters: return [d.structure for d in self.daughters] else: return self.particle def list_structure(self, final_states): - ''' + """ The structure in the form [(0,1,2,3)], where the dual-list is used for permutations for bose symmatrization. So for final_states=[a,b,c,c], [a,c,[c,b]] would be: [(0,2,3,1),(0,3,2,1)] - ''' + """ structure = list(iter_flatten(self.structure)) if set(structure) - set(final_states): - raise RuntimeError("The final states must encompass all particles in final states!") + raise RuntimeError( + "The final states must encompass all particles in final states!" 
+ ) - possibilities = [[i for i, v in enumerate(final_states) if v == name] for name in structure] + possibilities = [ + [i for i, v in enumerate(final_states) if v == name] for name in structure + ] return [a for a in product(*possibilities) if len(set(a)) == len(a)] def __str__(self): name = str(self.particle) if self.daughters: - name += '{'+','.join(map(str, self.daughters))+'}' + name += "{" + ",".join(map(str, self.daughters)) + "}" return name if graphviz: + def _make_graphviz(self): d = graphviz.Digraph() - d.attr(labelloc='t', label=str(self)) + d.attr(labelloc="t", label=str(self)) self._add_nodes(d) return d diff --git a/decaylanguage/modeling/goofit.py b/decaylanguage/modeling/goofit.py index 4683b829..2260b336 100644 --- a/decaylanguage/modeling/goofit.py +++ b/decaylanguage/modeling/goofit.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE # or https://github.com/scikit-hep/decaylanguage for details. -''' +""" This is a GooFit adaptor for amplitude chain. -''' +""" from __future__ import absolute_import from __future__ import division @@ -43,28 +44,17 @@ class SF_4Body(Enum): known_spinfactors = { - 'DtoA1P1_A1toS2P2_S2toP3P4': - (SF_4Body.DtoAP1_AtoSP2_StoP3P4,), - 'DtoA1P1_A1toV2P2Dwave_V2toP3P4': - (SF_4Body.DtoAP1_AtoVP2Dwave_VtoP3P4,), - 'DtoA1P1_A1toV2P2_V2toP3P4': - (SF_4Body.DtoAP1_AtoVP2Dwave_VtoP3P4,), - 'DtoS1S2_S1toP1P2_S2toP3P4': - (SF_4Body.ONE,), - 'DtoT1P1_T1toV2P2_V2toP3P4': - (SF_4Body.DtoTP1_TtoVP2_VtoP3P4,), - 'DtoV1S2_V1toP1P2_S2toP3P4': - (SF_4Body.DtoVS_VtoP1P2_StoP3P4,), - 'DtoV1V2_V1toP1P2_V2toP3P4': - (SF_4Body.DtoV1V2_V1toP1P2_V2toP3P4_S,), - 'DtoV1V2_V1toP1P2_V2toP3P4_D': - (SF_4Body.DtoV1V2_V1toP1P2_V2toP3P4_D,), - 'DtoV1V2_V1toP1P2_V2toP3P4_P': - (SF_4Body.DtoV1V2_V1toP1P2_V2toP3P4_P,), - 'Dtos1P1_s1toS2P2_S2toP3P4': - (SF_4Body.DtoPP1_PtoSP2_StoP3P4,), - 'Dtos1P1_s1toV2P2_V2toP3P4': - (SF_4Body.DtoPP1_PtoVP2_VtoP3P4,), + "DtoA1P1_A1toS2P2_S2toP3P4": (SF_4Body.DtoAP1_AtoSP2_StoP3P4,), + "DtoA1P1_A1toV2P2Dwave_V2toP3P4": (SF_4Body.DtoAP1_AtoVP2Dwave_VtoP3P4,), + "DtoA1P1_A1toV2P2_V2toP3P4": (SF_4Body.DtoAP1_AtoVP2Dwave_VtoP3P4,), + "DtoS1S2_S1toP1P2_S2toP3P4": (SF_4Body.ONE,), + "DtoT1P1_T1toV2P2_V2toP3P4": (SF_4Body.DtoTP1_TtoVP2_VtoP3P4,), + "DtoV1S2_V1toP1P2_S2toP3P4": (SF_4Body.DtoVS_VtoP1P2_StoP3P4,), + "DtoV1V2_V1toP1P2_V2toP3P4": (SF_4Body.DtoV1V2_V1toP1P2_V2toP3P4_S,), + "DtoV1V2_V1toP1P2_V2toP3P4_D": (SF_4Body.DtoV1V2_V1toP1P2_V2toP3P4_D,), + "DtoV1V2_V1toP1P2_V2toP3P4_P": (SF_4Body.DtoV1V2_V1toP1P2_V2toP3P4_P,), + "Dtos1P1_s1toS2P2_S2toP3P4": (SF_4Body.DtoPP1_PtoSP2_StoP3P4,), + "Dtos1P1_s1toV2P2_V2toP3P4": (SF_4Body.DtoPP1_PtoVP2_VtoP3P4,), } @@ -91,36 +81,44 @@ class GooFitChain(AmplitudeChain): @classmethod def make_intro(cls, all_states): - header = ' // Event type: {} -> '.format(all_states[0]) - header += ' '.join('{} ({})'.format(b, a) for a, b in enumerate(all_states[1:])) + header = " // Event type: {} -> ".format(all_states[0]) + header += " ".join( + "{} ({})".format(b, a) for a, b in enumerate(all_states[1:]) + ) - header += '''\n + header += """\n std::vector> line_factor_list; std::vector> spin_factor_list; std::vector amplitudes_list; -''' +""" final_particles = set(all_states) for particle in final_particles: name = particle.programmatic_name.upper() - header += ' constexpr fptype {name:8} {{ {particle.mass:<14.8g} }};\n'.format( - name=name, particle=particle) + header += ( + " 
constexpr fptype {name:8} {{ {particle.mass:<14.8g} }};\n".format( + name=name, particle=particle + ) + ) - header += '\n' + header += "\n" for particle in cls.all_particles - final_particles: name = particle.programmatic_name - header += ' Variable {name:15} {{ {nameQ:21}, {particle.mass:<10.8g} }};\n'.format( - name=name+'_M', nameQ='"'+name+'_M"', particle=particle) - header += ' Variable {name:15} {{ {nameQ:21}, {particle.width:<10.8g} }};\n'.format( - name=name+'_W', nameQ='"'+name+'_W"', particle=particle) - - header += '\n' - header += ' DK3P_DI.meson_radius = 5;\n' - header += ' DK3P_DI.particle_masses = {{{}}};\n'.format( - ', '.join(x.programmatic_name.upper() for x in all_states)) + header += " Variable {name:15} {{ {nameQ:21}, {particle.mass:<10.8g} }};\n".format( + name=name + "_M", nameQ='"' + name + '_M"', particle=particle + ) + header += " Variable {name:15} {{ {nameQ:21}, {particle.width:<10.8g} }};\n".format( + name=name + "_W", nameQ='"' + name + '_W"', particle=particle + ) + + header += "\n" + header += " DK3P_DI.meson_radius = 5;\n" + header += " DK3P_DI.particle_masses = {{{}}};\n".format( + ", ".join(x.programmatic_name.upper() for x in all_states) + ) return header @@ -141,23 +139,33 @@ def formfactor(self): elif self.L == 2: return SF_4Body.FF_12_34_L2 if norm else SF_4Body.FF_123_4_L2 else: - raise NotImplementedError("L = {self.L} is not implemented".format(self=self)) + raise NotImplementedError( + "L = {self.L} is not implemented".format(self=self) + ) def spindetails(self): if self.decay_structure == DecayStructure.FF_12_34: a = "{0}1".format(sprint(self[0].particle.spin_type)) b = "{0}2".format(sprint(self[1].particle.spin_type)) - return ("Dto{a}{b}_{a}toP1P2_{b}toP3P4" - + ("_{self.spinfactor}" if self.spinfactor and self.spinfactor != 'S' else "") - ).format(self=self, a=a, b=b) + return ( + "Dto{a}{b}_{a}toP1P2_{b}toP3P4" + + ( + "_{self.spinfactor}" + if self.spinfactor and self.spinfactor != "S" + else "" + ) + ).format(self=self, a=a, b=b) else: a = "{0}1".format(sprint(self[0].particle.spin_type)) if self[0].daughters: b = "{0}2".format(sprint(self[0][0].particle.spin_type)) else: raise LineFailure(self, "{0} has no daughters".format(self[0])) - wave = "{self[0].spinfactor}wave".format( - self=self) if self[0].spinfactor and self[0].spinfactor != 'S' else "" + wave = ( + "{self[0].spinfactor}wave".format(self=self) + if self[0].spinfactor and self[0].spinfactor != "S" + else "" + ) return "Dto{a}P1_{a}to{b}P2{wave}_{b}toP3P4".format(a=a, b=b, wave=wave) @property @@ -168,8 +176,12 @@ def spinfactors(self): spinfactor.append(self.formfactor) return spinfactor - raise LineFailure(self, "Spinfactors not currenly included!: {spindet}".format( - spindet=self.spindetails())) + raise LineFailure( + self, + "Spinfactors not currenly included!: {spindet}".format( + spindet=self.spindetails() + ), + ) # if self.decay_structure == DecayStructure.FF_12_34 : # if (self[0].particle.spin_type in {SpinType.Vector, SpinType.Axial} @@ -192,139 +204,200 @@ def spinfactors(self): @classmethod def make_pars(cls): headerlist = [] - header = '' + header = "" for name, par in cls.pars.iterrows(): pname = programmatic_name(name) if par.fix == 2: - headerlist.append(' Variable {pname} {{"{name}", {par.value}, {par.error} }};'.format( - pname=pname, name=name, par=par)) + headerlist.append( + ' Variable {pname} {{"{name}", {par.value}, {par.error} }};'.format( + pname=pname, name=name, par=par + ) + ) else: - headerlist.append(' Variable {pname} {{"{name}", {par.value} 
}};'.format( - pname=pname, name=name, par=par)) + headerlist.append( + ' Variable {pname} {{"{name}", {par.value} }};'.format( + pname=pname, name=name, par=par + ) + ) def strip_pararray(pars, begin, convert=lambda x: x): mysplines = pars.index[pars.index.str.contains(begin, regex=False)] vals = convert(mysplines.str.slice(len(begin))).astype(int) series = pd.Series(mysplines, vals).sort_index() - return ',\n'.join(series.map(lambda x: ' '+programmatic_name(x))) + return ",\n".join(series.map(lambda x: " " + programmatic_name(x))) if not GooFitChain.consts.empty: - splines = GooFitChain.consts.index[GooFitChain.consts.index.str.contains("Spline")] - splines = set(splines.str.rstrip("::Spline::N").str.rstrip( - "::Spline::Min").str.rstrip("::Spline::Max")) + splines = GooFitChain.consts.index[ + GooFitChain.consts.index.str.contains("Spline") + ] + splines = set( + splines.str.rstrip("::Spline::N") + .str.rstrip("::Spline::Min") + .str.rstrip("::Spline::Max") + ) for spline in splines: - header += '\n std::vector ' + programmatic_name(spline) + "_SplineArr {{\n" - header += strip_pararray(GooFitChain.pars, - "{spline}::Spline::Gamma::".format(spline=spline)) - header += '\n }};\n' + header += ( + "\n std::vector " + + programmatic_name(spline) + + "_SplineArr {{\n" + ) + header += strip_pararray( + GooFitChain.pars, "{spline}::Spline::Gamma::".format(spline=spline) + ) + header += "\n }};\n" f_scatt = GooFitChain.pars.index[GooFitChain.pars.index.str.contains("f_scatt")] if len(f_scatt): - header += '\n std::array f_scatt {{\n' + header += "\n std::array f_scatt {{\n" header += strip_pararray(GooFitChain.pars, "f_scatt") - header += '\n }};\n' + header += "\n }};\n" IS_mat = GooFitChain.pars.index[GooFitChain.pars.index.str.contains("IS_p")] if len(IS_mat): names = ("pipi", "KK", "4pi", "EtaEta", "EtapEta", "mass") def convert(x): - i = x.str.split('_').str[0] - j = x.str.split('_').str[1].map(lambda x: names.index(x)) - return i.astype(int)*6 + j.astype(int) - header += '\n std::array IS_poles {{\n' + i = x.str.split("_").str[0] + j = x.str.split("_").str[1].map(lambda x: names.index(x)) + return i.astype(int) * 6 + j.astype(int) + + header += "\n std::array IS_poles {{\n" header += strip_pararray(GooFitChain.pars, "IS_p", convert) - header += '\n }};\n' + header += "\n }};\n" - return '\n'.join(headerlist) + '\n' + header + return "\n".join(headerlist) + "\n" + header def make_lineshape(self, structure): name = self.name par = self.particle.programmatic_name - a = structure[0]+1 - b = structure[1]+1 + a = structure[0] + 1 + b = structure[1] + 1 L = self.L - radius = 5.0 if 'c' in self.particle.quarks.lower() else 1.5 + radius = 5.0 if "c" in self.particle.quarks.lower() else 1.5 if self.ls_enum == LS.RBW: - return 'new Lineshapes::RBW("{name}", {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2)'.format(name=name, par=par, L=L, a=a, b=b) + return 'new Lineshapes::RBW("{name}", {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2)'.format( + name=name, par=par, L=L, a=a, b=b + ) elif self.ls_enum == LS.GSpline: - min = self.__class__.consts.loc["{self.name}::Spline::Min".format(self=self), "value"] - max = self.__class__.consts.loc["{self.name}::Spline::Max".format(self=self), "value"] - N = self.__class__.consts.loc["{self.name}::Spline::N".format(self=self), "value"] + min = self.__class__.consts.loc[ + "{self.name}::Spline::Min".format(self=self), "value" + ] + max = self.__class__.consts.loc[ + "{self.name}::Spline::Max".format(self=self), "value" + ] + N = self.__class__.consts.loc[ + 
"{self.name}::Spline::N".format(self=self), "value" + ] AdditionalVars = programmatic_name(self.name) + "_SplineArr" - return '''new Lineshapes::GSpline("{name}", {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2, - {radius}, {AdditionalVars}, Lineshapes::spline_t({min},{max},{N}))'''.format( - name=name, par=par, L=L, a=a, b=b, radius=radius, AdditionalVars=AdditionalVars, min=min, max=max, N=int(N)) + return """new Lineshapes::GSpline("{name}", {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2, + {radius}, {AdditionalVars}, Lineshapes::spline_t({min},{max},{N}))""".format( + name=name, + par=par, + L=L, + a=a, + b=b, + radius=radius, + AdditionalVars=AdditionalVars, + min=min, + max=max, + N=int(N), + ) elif self.ls_enum == LS.kMatrix: - _, poleprod, pterm = self.lineshape.split('.') - is_pole = 'true' if poleprod == 'pole' else 'false' - return '''new Lineshapes::kMatrix("{name}", {pterm}, {is_pole}, + _, poleprod, pterm = self.lineshape.split(".") + is_pole = "true" if poleprod == "pole" else "false" + return """new Lineshapes::kMatrix("{name}", {pterm}, {is_pole}, sA0, sA, s0_prod, s0_scatt, f_scatt, IS_poles, - {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2, {radius})'''.format( - name=name, pterm=pterm, is_pole=is_pole, par=par, L=L, a=a, b=b, radius=radius) + {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2, {radius})""".format( + name=name, + pterm=pterm, + is_pole=is_pole, + par=par, + L=L, + a=a, + b=b, + radius=radius, + ) elif self.ls_enum == LS.FOCUS: - _, mod = self.lineshape.split('.') - return ('new Lineshapes::FOCUS("{name}", Lineshapes::FOCUS::Mod::{mod},' - ' {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2, {radius})').format( - name=name, mod=mod, par=par, L=L, a=a, b=b, radius=radius) + _, mod = self.lineshape.split(".") + return ( + 'new Lineshapes::FOCUS("{name}", Lineshapes::FOCUS::Mod::{mod},' + " {par}_M, {par}_W, {L}, M_{a}{b}, FF::BL2, {radius})" + ).format(name=name, mod=mod, par=par, L=L, a=a, b=b, radius=radius) else: raise NotImplementedError( - "Unimplemented GooFit Lineshape {self.ls_enum.name}".format(self=self)) + "Unimplemented GooFit Lineshape {self.ls_enum.name}".format(self=self) + ) def make_spinfactor(self, final_states): spin_factors = self.spinfactors - intro = ' spin_factor_list.push_back(std::vector({\n' + intro = " spin_factor_list.push_back(std::vector({\n" factor = [] for structure in self.list_structure(final_states): if not spin_factors: factor.append( - ' // TODO: Spin factor not implemented yet for {spindet}'.format(spindet=self.spindetails())) + " // TODO: Spin factor not implemented yet for {spindet}".format( + spindet=self.spindetails() + ) + ) else: for spin_factor in spin_factors: - structure_list = ', '.join(map(str, structure)) - factor.append(' new SpinFactor("SF", SF_4Body::{spin_factor.name:37}, {structure_list})'.format( - spin_factor=spin_factor, structure_list=structure_list)) - exit = '\n }));\n' - return intro + ',\n'.join(factor) + exit + structure_list = ", ".join(map(str, structure)) + factor.append( + ' new SpinFactor("SF", SF_4Body::{spin_factor.name:37}, {structure_list})'.format( + spin_factor=spin_factor, structure_list=structure_list + ) + ) + exit = "\n }));\n" + return intro + ",\n".join(factor) + exit def make_linefactor(self, final_states): - intro = ' line_factor_list.push_back(std::vector{\n' + intro = " line_factor_list.push_back(std::vector{\n" factor = [] for structure in self.list_structure(final_states): for sub in self.vertexes: - factor.append(' ' + sub.make_lineshape(structure)) - exit = '\n });\n' - return intro + ',\n'.join(factor) + 
exit + factor.append(" " + sub.make_lineshape(structure)) + exit = "\n });\n" + return intro + ",\n".join(factor) + exit def make_amplitude(self, final_states): n = len(self.list_structure(final_states)) - fix = 'true' if self.fix else 'false' - return (' amplitudes_list.push_back(new Amplitude{{\n' - ' "{self!s}",\n' - ' mkvar("{self!s}_r", {fix}, {self.amp.real:.6}, {self.err.real:.6}),\n' - ' mkvar("{self!s}_i", {fix}, {self.amp.imag:.6}, {self.err.imag:.6}),\n' - ' line_factor_list.back(),\n' - ' spin_factor_list.back(),\n' - ' {n}}});\n\n' - ' DK3P_DI.amplitudes_B.push_back(amplitudes_list.back());'.format( - self=self, fix=fix, n=n)) + fix = "true" if self.fix else "false" + return ( + " amplitudes_list.push_back(new Amplitude{{\n" + ' "{self!s}",\n' + ' mkvar("{self!s}_r", {fix}, {self.amp.real:.6}, {self.err.real:.6}),\n' + ' mkvar("{self!s}_i", {fix}, {self.amp.imag:.6}, {self.err.imag:.6}),\n' + " line_factor_list.back(),\n" + " spin_factor_list.back(),\n" + " {n}}});\n\n" + " DK3P_DI.amplitudes_B.push_back(amplitudes_list.back());".format( + self=self, fix=fix, n=n + ) + ) def to_goofit(self, final_states): - return (' // ' + str(self) + '\n\n' - + self.make_spinfactor(final_states) + '\n' - + self.make_linefactor(final_states) + '\n' - + self.make_amplitude(final_states)) + return ( + " // " + + str(self) + + "\n\n" + + self.make_spinfactor(final_states) + + "\n" + + self.make_linefactor(final_states) + + "\n" + + self.make_amplitude(final_states) + ) @classmethod def read_ampgen(cls, *args, **kargs): line_arr, GooFitChain.pars, GooFitChain.consts, all_states = super( - GooFitChain, cls).read_ampgen(*args, **kargs) + GooFitChain, cls + ).read_ampgen(*args, **kargs) return line_arr, all_states diff --git a/decaylanguage/utils/__init__.py b/decaylanguage/utils/__init__.py index 7e97a2a1..49bb08d9 100644 --- a/decaylanguage/utils/__init__.py +++ b/decaylanguage/utils/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .particleutils import charge_conjugate_name diff --git a/decaylanguage/utils/errors.py b/decaylanguage/utils/errors.py index 94762b61..f2107854 100644 --- a/decaylanguage/utils/errors.py +++ b/decaylanguage/utils/errors.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -10,4 +11,6 @@ class LineFailure(RuntimeError): def __init__(self, line, message, *args, **kwargs): - super(LineFailure, self).__init__("{0}: {1}".format(line, message), *args, **kwargs) + super(LineFailure, self).__init__( + "{0}: {1}".format(line, message), *args, **kwargs + ) diff --git a/decaylanguage/utils/particleutils.py b/decaylanguage/utils/particleutils.py index 82e4c0d0..4f00ecc9 100644 --- a/decaylanguage/utils/particleutils.py +++ b/decaylanguage/utils/particleutils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. 
# # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -7,14 +8,16 @@ try: from functools import lru_cache + cacher = lru_cache(maxsize=64) except ImportError: from cachetools import cached, LFUCache + cacher = cached(cache=LFUCache(maxsize=64)) from particle import Particle from particle.converters import EvtGenName2PDGIDBiMap -from particle.converters import PDG2EvtGenNameMap,EvtGen2PDGNameMap +from particle.converters import PDG2EvtGenNameMap, EvtGen2PDGNameMap from particle.exceptions import MatchingIDNotFound @@ -54,7 +57,7 @@ def charge_conjugate_name(name, pdg_name=False): # Convert the EvtGen name back to a PDG name, to match input type return EvtGen2PDGNameMap[ccname] except MatchingIDNotFound: # Catch issue in PDG2EvtGenNameMap matching - return 'ChargeConj({0})'.format(name) + return "ChargeConj({0})".format(name) # Dealing only with EvtGen names at this stage try: @@ -63,4 +66,4 @@ def charge_conjugate_name(name, pdg_name=False): try: return EvtGenName2PDGIDBiMap[-EvtGenName2PDGIDBiMap[name]] except: - return 'ChargeConj({0})'.format(name) + return "ChargeConj({0})".format(name) diff --git a/decaylanguage/utils/utilities.py b/decaylanguage/utils/utilities.py index afef23c9..9c1d2a94 100644 --- a/decaylanguage/utils/utilities.py +++ b/decaylanguage/utils/utilities.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -9,9 +10,9 @@ def iter_flatten(iterable): - ''' + """ Flatten nested tuples and lists - ''' + """ for e in iterable: if isinstance(e, (list, tuple)): for f in iter_flatten(e): @@ -21,36 +22,38 @@ def iter_flatten(iterable): def split(x): - ''' + """ Break up a comma separated list, but respect curly brackets. 
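    Only commas at zero curly-bracket depth act as separators, so the example
    below is split into ['this', ' that { that { this, that } }'].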
For example: this, that { that { this, that } } would only break on the first comma, since the second is in a {} list - ''' + """ c = 0 i = 0 out = [] while len(x) > 0: if i + 1 == len(x): - out.append(x[:i + 1]) + out.append(x[: i + 1]) return out - elif (x[i] == ',' and c == 0): + elif x[i] == "," and c == 0: out.append(x[:i]) - x = x[i + 1:] + x = x[i + 1 :] i = -1 - elif x[i] == '{': + elif x[i] == "{": c += 1 - elif x[i] == '}': + elif x[i] == "}": c -= 1 i += 1 def filter_lines(matcher, input): - ''' + """ Filter out lines into new variable if they match a regular expression - ''' - output = [matcher.match(l).groupdict() for l in input if matcher.match(l) is not None] + """ + output = [ + matcher.match(l).groupdict() for l in input if matcher.match(l) is not None + ] input = [l for l in input if matcher.match(l) is None] return output, input diff --git a/docs/conf.py b/docs/conf.py index 192df571..7f1d9d76 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,48 +5,48 @@ extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.coverage', - 'sphinx.ext.doctest', - 'sphinx.ext.extlinks', - 'sphinx.ext.ifconfig', - 'sphinx.ext.napoleon', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.extlinks", + "sphinx.ext.ifconfig", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", ] -if os.getenv('SPELLCHECK'): - extensions += 'sphinxcontrib.spelling', +if os.getenv("SPELLCHECK"): + extensions += ("sphinxcontrib.spelling",) spelling_show_suggestions = True - spelling_lang = 'en_US' - -source_suffix = '.rst' -master_doc = 'index' -project = 'decaylanguage' -year = '2018' -author = 'Henry Fredrick Schreiner III' -copyright = '{0}, {1}'.format(year, author) -version = release = '0.2.0' - -pygments_style = 'trac' -templates_path = ['.'] + spelling_lang = "en_US" + +source_suffix = ".rst" +master_doc = "index" +project = "decaylanguage" +year = "2018" +author = "Henry Fredrick Schreiner III" +copyright = "{0}, {1}".format(year, author) +version = release = "0.2.0" + +pygments_style = "trac" +templates_path = ["."] extlinks = { - 'issue': ('https://github.com/scikit-hep/decaylanguage/issues/%s', '#'), - 'pr': ('https://github.com/scikit-hep/decaylanguage/pull/%s', 'PR #'), + "issue": ("https://github.com/scikit-hep/decaylanguage/issues/%s", "#"), + "pr": ("https://github.com/scikit-hep/decaylanguage/pull/%s", "PR #"), } # on_rtd is whether we are on readthedocs.org -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' +on_rtd = os.environ.get("READTHEDOCS", None) == "True" if not on_rtd: # only set the theme if we're building docs locally - html_theme = 'sphinx_rtd_theme' + html_theme = "sphinx_rtd_theme" html_use_smartypants = True -html_last_updated_fmt = '%b %d, %Y' +html_last_updated_fmt = "%b %d, %Y" html_split_index = False html_sidebars = { - '**': ['searchbox.html', 'globaltoc.html', 'sourcelink.html'], + "**": ["searchbox.html", "globaltoc.html", "sourcelink.html"], } -html_short_title = '%s-%s' % (project, version) +html_short_title = "%s-%s" % (project, version) napoleon_use_ivar = True napoleon_use_rtype = False diff --git a/docs/index.rst b/docs/index.rst index 40f35b5e..ad842d51 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,4 +19,3 @@ Indices and tables * :ref:`genindex` * :ref:`modindex` * :ref:`search` - diff --git a/docs/installation.rst b/docs/installation.rst index 98ed0bb1..e24a9f6c 100644 --- a/docs/installation.rst +++ 
b/docs/installation.rst @@ -9,5 +9,3 @@ At the command line:: Or:: pip install https://github.com/scikit-hep/decaylanguage.git - - diff --git a/docs/requirements.txt b/docs/requirements.txt index 8a8a4047..46ae74e9 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,3 @@ +-e . sphinx>=1.3 sphinx-rtd-theme --e . diff --git a/models/DtoKpipipi_v2.txt b/models/DtoKpipipi_v2.txt index 9fa51a51..ee8d2375 100644 --- a/models/DtoKpipipi_v2.txt +++ b/models/DtoKpipipi_v2.txt @@ -10,215 +10,215 @@ K(1)(1270)bar-::Spline::Min 0.6 K(1)(1270)bar-::Spline::Max 3 K(1)(1270)bar-::Spline::N 40 -D0[D]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 2 1 0 2 0 0 -D0[D]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.648936 0.0205762 0 -0.271637 0.0342107 -D0[P]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 0 0.362058 0.00237314 0 -1.79607 0.00663691 -D0[P]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.642781 0.00570074 0 1.69828 0.00900026 -D0{K(1)(1270)bar-,pi+} 0 0.361958 0.00377983 0 1.99329 0.0132565 -D0{K(1)(1400)bar-{K*(892)bar0{K-,pi+},pi-},pi+} 0 0.127477 0.00221089 0 -2.96395 0.0192276 -D0{K(1460)bar-,pi+} 0 0.121928 0.00232671 0 3.01374 0.0388723 -D0{K(2)*(1430)bar-{K*(892)bar0{K-,pi+},pi-},pi+} 0 0.301933 0.0039873 0 -1.35594 0.012779 -D0{K*(892)bar0{K-,pi+},PiPi10} 2 1 0 2 0 0 -D0{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 0 0.196037 0.0012135 0 -0.390311 0.00629977 -D0{KPi00,PiPi00} 2 1 0 2 0 0 -D0{a(1)(1260)+,K-} 0 0.813449 0.00586375 0 -2.60325 0.00790284 -D0{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.161918 0.00517933 0 -1.50312 0.0323255 -D0{rho(770)0{pi+,pi-},KPi10} 0 0.337933 0.00605558 0 1.27493 0.0138771 -K(1)(1270)bar-[D;GSpline.EFF]{K*(892)bar0{K-,pi+},pi-} 0 0.76873 0.0205573 0 -0.336606 0.0281962 -K(1)(1270)bar-[GSpline.EFF]{K*(892)bar0{K-,pi+},pi-} 0 0.387786 0.00688031 0 -3.01203 0.0189631 -K(1)(1270)bar-[GSpline.EFF]{KPi20[FOCUS.Kpi]{K-,pi+},pi-} 0 0.553577 0.0103218 0 0.927996 0.0186323 -K(1)(1270)bar-[GSpline.EFF]{omega(782)0{pi+,pi-},K-} 0 0.146482 0.00542024 0 0.157787 0.0369024 -K(1)(1270)bar-[GSpline.EFF]{rho(1450)0{pi+,pi-},K-} 0 2.01551 0.0260671 0 -2.08574 0.0149467 -K(1)(1270)bar-[GSpline.EFF]{rho(770)0{pi+,pi-},K-} 2 1 0 2 0 0 -K(1460)bar-[GSpline.EFF]{K*(892)bar0{K-,pi+},pi-} 2 1 0 2 0 0 -K(1460)bar-[GSpline.EFF]{PiPi30,K-} 2 1 0 2 0 0 -KPi00[FOCUS.I32]{K-,pi+} 0 0.869988 0.0102238 0 -2.60381 0.0124354 -KPi00[FOCUS.KEta]{K-,pi+} 0 2.6139 0.14138 0 -0.332883 0.0421308 -KPi00[FOCUS.Kpi]{K-,pi+} 2 1 0 2 0 0 -KPi10[FOCUS.I32]{K-,pi+} 0 1.07313 0.00826134 0 -2.28387 0.00797464 -KPi10[FOCUS.Kpi]{K-,pi+} 2 1 0 2 0 0 -PiPi00[kMatrix.pole.1]{pi+,pi-} 0 0.55373 0.00858738 0 0.616282 0.0115494 -PiPi00[kMatrix.prod.0]{pi+,pi-} 0 0.0816985 0.00131264 0 -2.56548 0.0125314 -PiPi10[kMatrix.pole.1]{pi+,pi-} 0 0.305436 0.0110739 0 1.14414 0.0267795 -PiPi10[kMatrix.prod.0]{pi+,pi-} 0 0.260569 0.00501229 0 -2.60095 0.0164546 -PiPi20[kMatrix.pole.0]{pi+,pi-} 0 0.290881 0.00657536 0 2.89408 0.0231331 -PiPi20[kMatrix.pole.1]{pi+,pi-} 0 0.991497 0.0181081 0 -0.387198 0.0182221 -PiPi20[kMatrix.prod.0]{pi+,pi-} 0 0.117233 0.00224302 0 2.97581 0.0215617 -PiPi30[kMatrix.pole.0]{pi+,pi-} 0 0.314652 0.00999844 0 0.815669 0.0339684 -PiPi30[kMatrix.pole.1]{pi+,pi-} 0 0.813302 0.0322292 0 1.96997 0.0445942 -PiPi30[kMatrix.prod.1]{pi+,pi-} 0 1.81891 0.0592467 0 -1.41006 0.0388266 -a(1)(1260)+[D;GSpline.EFF]{rho(770)0{pi+,pi-},pi+} 0 0.582157 0.0110043 0 -2.66737 0.0208508 -a(1)(1260)+[GSpline.EFF]{PiPi20,pi+} 2 1 0 2 0 0 -a(1)(1260)+[GSpline.EFF]{rho(770)0{pi+,pi-},pi+} 2 1 0 2 0 0 -D0_radius 2 
0.0037559 0 -IS_p1_4pi 2 0 0 -IS_p1_EtaEta 2 -0.39899 0 -IS_p1_EtapEta 2 -0.34639 0 -IS_p1_KK 2 -0.55377 0 -IS_p1_mass 2 0.651 0 -IS_p1_pipi 2 0.22889 0 -IS_p2_4pi 2 0 0 -IS_p2_EtaEta 2 0.39065 0 -IS_p2_EtapEta 2 0.31503 0 -IS_p2_KK 2 0.55095 0 -IS_p2_mass 2 1.2036 0 -IS_p2_pipi 2 0.94128 0 -IS_p3_4pi 2 0.55639 0 -IS_p3_EtaEta 2 0.1834 0 -IS_p3_EtapEta 2 0.18681 0 -IS_p3_KK 2 0.23888 0 -IS_p3_mass 2 1.55817 0 -IS_p3_pipi 2 0.36856 0 -IS_p4_4pi 2 0.85679 0 -IS_p4_EtaEta 2 0.19906 0 -IS_p4_EtapEta 2 -0.00984 0 -IS_p4_KK 2 0.40907 0 -IS_p4_mass 2 1.21 0 -IS_p4_pipi 2 0.3365 0 -IS_p5_4pi 2 -0.79658 0 -IS_p5_EtaEta 2 -0.00355 0 -IS_p5_EtapEta 2 0.22358 0 -IS_p5_KK 2 -0.17558 0 -IS_p5_mass 2 1.82206 0 -IS_p5_pipi 2 0.18171 0 -K(1)(1270)bar-::Spline::Gamma::0 2 6.62044e-09 0 -K(1)(1270)bar-::Spline::Gamma::1 2 8.1034e-05 0 -K(1)(1270)bar-::Spline::Gamma::10 2 0.142193 0 -K(1)(1270)bar-::Spline::Gamma::11 2 0.182565 0 -K(1)(1270)bar-::Spline::Gamma::12 2 0.231309 0 -K(1)(1270)bar-::Spline::Gamma::13 2 0.295139 0 -K(1)(1270)bar-::Spline::Gamma::14 2 0.383415 0 -K(1)(1270)bar-::Spline::Gamma::15 2 0.507206 0 -K(1)(1270)bar-::Spline::Gamma::16 2 0.673751 0 -K(1)(1270)bar-::Spline::Gamma::17 2 0.918665 0 -K(1)(1270)bar-::Spline::Gamma::18 2 1.18142 0 -K(1)(1270)bar-::Spline::Gamma::19 2 1.41125 0 -K(1)(1270)bar-::Spline::Gamma::2 2 0.000601093 0 -K(1)(1270)bar-::Spline::Gamma::20 2 1.61709 0 -K(1)(1270)bar-::Spline::Gamma::21 2 1.80236 0 -K(1)(1270)bar-::Spline::Gamma::22 2 1.97044 0 -K(1)(1270)bar-::Spline::Gamma::23 2 2.12449 0 -K(1)(1270)bar-::Spline::Gamma::24 2 2.26729 0 -K(1)(1270)bar-::Spline::Gamma::25 2 2.40124 0 -K(1)(1270)bar-::Spline::Gamma::26 2 2.52841 0 -K(1)(1270)bar-::Spline::Gamma::27 2 2.65063 0 -K(1)(1270)bar-::Spline::Gamma::28 2 2.76952 0 -K(1)(1270)bar-::Spline::Gamma::29 2 2.88658 0 -K(1)(1270)bar-::Spline::Gamma::3 2 0.00199416 0 -K(1)(1270)bar-::Spline::Gamma::30 2 3.0032 0 -K(1)(1270)bar-::Spline::Gamma::31 2 3.12073 0 -K(1)(1270)bar-::Spline::Gamma::32 2 3.24046 0 -K(1)(1270)bar-::Spline::Gamma::33 2 3.36373 0 -K(1)(1270)bar-::Spline::Gamma::34 2 3.49188 0 -K(1)(1270)bar-::Spline::Gamma::35 2 3.62634 0 -K(1)(1270)bar-::Spline::Gamma::36 2 3.76865 0 -K(1)(1270)bar-::Spline::Gamma::37 2 3.92048 0 -K(1)(1270)bar-::Spline::Gamma::38 2 4.08365 0 -K(1)(1270)bar-::Spline::Gamma::39 2 4.26015 0 -K(1)(1270)bar-::Spline::Gamma::4 2 0.00478532 0 -K(1)(1270)bar-::Spline::Gamma::5 2 0.0097556 0 -K(1)(1270)bar-::Spline::Gamma::6 2 0.0184164 0 -K(1)(1270)bar-::Spline::Gamma::7 2 0.0348721 0 -K(1)(1270)bar-::Spline::Gamma::8 2 0.0672518 0 -K(1)(1270)bar-::Spline::Gamma::9 2 0.105115 0 -K(1)(1270)bar-_mass 0 1289.81 0.557988 -K(1)(1270)bar-_radius 2 0.0017 0 -K(1)(1270)bar-_width 0 116.114 1.6492 -K(1460)bar-::Spline::Gamma::0 2 8.29869e-06 0 -K(1460)bar-::Spline::Gamma::1 2 0.00482634 0 -K(1460)bar-::Spline::Gamma::10 2 0.162536 0 -K(1460)bar-::Spline::Gamma::11 2 0.195329 0 -K(1460)bar-::Spline::Gamma::12 2 0.232727 0 -K(1460)bar-::Spline::Gamma::13 2 0.273932 0 -K(1460)bar-::Spline::Gamma::14 2 0.318217 0 -K(1460)bar-::Spline::Gamma::15 2 0.364931 0 -K(1460)bar-::Spline::Gamma::16 2 0.413518 0 -K(1460)bar-::Spline::Gamma::17 2 0.463526 0 -K(1460)bar-::Spline::Gamma::18 2 0.514629 0 -K(1460)bar-::Spline::Gamma::19 2 0.566657 0 -K(1460)bar-::Spline::Gamma::2 2 0.0176071 0 -K(1460)bar-::Spline::Gamma::20 2 0.619642 0 -K(1460)bar-::Spline::Gamma::21 2 0.67389 0 -K(1460)bar-::Spline::Gamma::22 2 0.730087 0 -K(1460)bar-::Spline::Gamma::23 2 0.789463 0 -K(1460)bar-::Spline::Gamma::24 2 0.853918 
0 -K(1460)bar-::Spline::Gamma::25 2 0.925529 0 -K(1460)bar-::Spline::Gamma::26 2 0.999944 0 -K(1460)bar-::Spline::Gamma::27 2 1.05939 0 -K(1460)bar-::Spline::Gamma::28 2 1.11703 0 -K(1460)bar-::Spline::Gamma::29 2 1.17704 0 -K(1460)bar-::Spline::Gamma::3 2 0.0349086 0 -K(1460)bar-::Spline::Gamma::30 2 1.2408 0 -K(1460)bar-::Spline::Gamma::31 2 1.30884 0 -K(1460)bar-::Spline::Gamma::32 2 1.37941 0 -K(1460)bar-::Spline::Gamma::33 2 1.4487 0 -K(1460)bar-::Spline::Gamma::34 2 1.51729 0 -K(1460)bar-::Spline::Gamma::35 2 1.58528 0 -K(1460)bar-::Spline::Gamma::36 2 1.6526 0 -K(1460)bar-::Spline::Gamma::37 2 1.71901 0 -K(1460)bar-::Spline::Gamma::38 2 1.78418 0 -K(1460)bar-::Spline::Gamma::39 2 1.8476 0 -K(1460)bar-::Spline::Gamma::4 2 0.0532389 0 -K(1460)bar-::Spline::Gamma::5 2 0.0703992 0 -K(1460)bar-::Spline::Gamma::6 2 0.0855373 0 -K(1460)bar-::Spline::Gamma::7 2 0.0991271 0 -K(1460)bar-::Spline::Gamma::8 2 0.114209 0 -K(1460)bar-::Spline::Gamma::9 2 0.135205 0 -K(1460)bar-_mass 0 1482.4 3.57585 -K(1460)bar-_radius 2 0.0017 0 -K(1460)bar-_width 0 335.595 6.19588 -PiPi00_s0_prod 2 -0.196872 0 -PiPi10_s0_prod 2 -0.950027 0 -PiPi20_s0_prod 2 -0.165753 0 -PiPi30_s0_prod 2 -0.0676736 0 -a(1)(1260)+::Spline::Gamma::0 2 1.23936e-06 0 -a(1)(1260)+::Spline::Gamma::1 2 0.000223871 0 -a(1)(1260)+::Spline::Gamma::10 2 0.0832521 0 -a(1)(1260)+::Spline::Gamma::11 2 0.115406 0 -a(1)(1260)+::Spline::Gamma::12 2 0.159329 0 -a(1)(1260)+::Spline::Gamma::13 2 0.218726 0 -a(1)(1260)+::Spline::Gamma::14 2 0.295241 0 -a(1)(1260)+::Spline::Gamma::15 2 0.384295 0 -a(1)(1260)+::Spline::Gamma::16 2 0.475641 0 -a(1)(1260)+::Spline::Gamma::17 2 0.560491 0 -a(1)(1260)+::Spline::Gamma::18 2 0.635169 0 -a(1)(1260)+::Spline::Gamma::19 2 0.699435 0 -a(1)(1260)+::Spline::Gamma::2 2 0.00119329 0 -a(1)(1260)+::Spline::Gamma::20 2 0.754352 0 -a(1)(1260)+::Spline::Gamma::21 2 0.801255 0 -a(1)(1260)+::Spline::Gamma::22 2 0.841402 0 -a(1)(1260)+::Spline::Gamma::23 2 0.875894 0 -a(1)(1260)+::Spline::Gamma::24 2 0.905694 0 -a(1)(1260)+::Spline::Gamma::25 2 0.931693 0 -a(1)(1260)+::Spline::Gamma::26 2 0.954723 0 -a(1)(1260)+::Spline::Gamma::27 2 0.97539 0 -a(1)(1260)+::Spline::Gamma::28 2 0.994175 0 -a(1)(1260)+::Spline::Gamma::29 2 1.01148 0 -a(1)(1260)+::Spline::Gamma::3 2 0.00326416 0 -a(1)(1260)+::Spline::Gamma::30 2 1.02765 0 -a(1)(1260)+::Spline::Gamma::31 2 1.04297 0 -a(1)(1260)+::Spline::Gamma::32 2 1.05768 0 -a(1)(1260)+::Spline::Gamma::33 2 1.07198 0 -a(1)(1260)+::Spline::Gamma::34 2 1.08602 0 -a(1)(1260)+::Spline::Gamma::35 2 1.09995 0 -a(1)(1260)+::Spline::Gamma::36 2 1.11387 0 -a(1)(1260)+::Spline::Gamma::37 2 1.12789 0 -a(1)(1260)+::Spline::Gamma::38 2 1.14208 0 -a(1)(1260)+::Spline::Gamma::39 2 1.15651 0 -a(1)(1260)+::Spline::Gamma::4 2 0.00671647 0 -a(1)(1260)+::Spline::Gamma::5 2 0.0118496 0 -a(1)(1260)+::Spline::Gamma::6 2 0.0190462 0 -a(1)(1260)+::Spline::Gamma::7 2 0.0288353 0 -a(1)(1260)+::Spline::Gamma::8 2 0.0419745 0 -a(1)(1260)+::Spline::Gamma::9 2 0.0595699 0 -a(1)(1260)+_mass 0 1195.05 1.04514 -a(1)(1260)+_radius 2 0.0017 0 -a(1)(1260)+_width 0 422.013 2.0958 -f_scatt0 2 0.23399 0 -f_scatt1 2 0.15044 0 -f_scatt2 2 -0.20545 0 -f_scatt3 2 0.32825 0 -f_scatt4 2 0.35412 0 -s0_prod 2 -1 0 -s0_scatt 2 -3.92637 0 -sA 2 1 0 +D0[D]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 2 1 0 2 0 0 +D0[D]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.648936 0.0205762 0 -0.271637 0.0342107 +D0[P]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 0 0.362058 0.00237314 0 -1.79607 0.00663691 +D0[P]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 
0.642781 0.00570074 0 1.69828 0.00900026 +D0{K(1)(1270)bar-,pi+} 0 0.361958 0.00377983 0 1.99329 0.0132565 +D0{K(1)(1400)bar-{K*(892)bar0{K-,pi+},pi-},pi+} 0 0.127477 0.00221089 0 -2.96395 0.0192276 +D0{K(1460)bar-,pi+} 0 0.121928 0.00232671 0 3.01374 0.0388723 +D0{K(2)*(1430)bar-{K*(892)bar0{K-,pi+},pi-},pi+} 0 0.301933 0.0039873 0 -1.35594 0.012779 +D0{K*(892)bar0{K-,pi+},PiPi10} 2 1 0 2 0 0 +D0{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 0 0.196037 0.0012135 0 -0.390311 0.00629977 +D0{KPi00,PiPi00} 2 1 0 2 0 0 +D0{a(1)(1260)+,K-} 0 0.813449 0.00586375 0 -2.60325 0.00790284 +D0{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.161918 0.00517933 0 -1.50312 0.0323255 +D0{rho(770)0{pi+,pi-},KPi10} 0 0.337933 0.00605558 0 1.27493 0.0138771 +K(1)(1270)bar-[D;GSpline.EFF]{K*(892)bar0{K-,pi+},pi-} 0 0.76873 0.0205573 0 -0.336606 0.0281962 +K(1)(1270)bar-[GSpline.EFF]{K*(892)bar0{K-,pi+},pi-} 0 0.387786 0.00688031 0 -3.01203 0.0189631 +K(1)(1270)bar-[GSpline.EFF]{KPi20[FOCUS.Kpi]{K-,pi+},pi-} 0 0.553577 0.0103218 0 0.927996 0.0186323 +K(1)(1270)bar-[GSpline.EFF]{omega(782)0{pi+,pi-},K-} 0 0.146482 0.00542024 0 0.157787 0.0369024 +K(1)(1270)bar-[GSpline.EFF]{rho(1450)0{pi+,pi-},K-} 0 2.01551 0.0260671 0 -2.08574 0.0149467 +K(1)(1270)bar-[GSpline.EFF]{rho(770)0{pi+,pi-},K-} 2 1 0 2 0 0 +K(1460)bar-[GSpline.EFF]{K*(892)bar0{K-,pi+},pi-} 2 1 0 2 0 0 +K(1460)bar-[GSpline.EFF]{PiPi30,K-} 2 1 0 2 0 0 +KPi00[FOCUS.I32]{K-,pi+} 0 0.869988 0.0102238 0 -2.60381 0.0124354 +KPi00[FOCUS.KEta]{K-,pi+} 0 2.6139 0.14138 0 -0.332883 0.0421308 +KPi00[FOCUS.Kpi]{K-,pi+} 2 1 0 2 0 0 +KPi10[FOCUS.I32]{K-,pi+} 0 1.07313 0.00826134 0 -2.28387 0.00797464 +KPi10[FOCUS.Kpi]{K-,pi+} 2 1 0 2 0 0 +PiPi00[kMatrix.pole.1]{pi+,pi-} 0 0.55373 0.00858738 0 0.616282 0.0115494 +PiPi00[kMatrix.prod.0]{pi+,pi-} 0 0.0816985 0.00131264 0 -2.56548 0.0125314 +PiPi10[kMatrix.pole.1]{pi+,pi-} 0 0.305436 0.0110739 0 1.14414 0.0267795 +PiPi10[kMatrix.prod.0]{pi+,pi-} 0 0.260569 0.00501229 0 -2.60095 0.0164546 +PiPi20[kMatrix.pole.0]{pi+,pi-} 0 0.290881 0.00657536 0 2.89408 0.0231331 +PiPi20[kMatrix.pole.1]{pi+,pi-} 0 0.991497 0.0181081 0 -0.387198 0.0182221 +PiPi20[kMatrix.prod.0]{pi+,pi-} 0 0.117233 0.00224302 0 2.97581 0.0215617 +PiPi30[kMatrix.pole.0]{pi+,pi-} 0 0.314652 0.00999844 0 0.815669 0.0339684 +PiPi30[kMatrix.pole.1]{pi+,pi-} 0 0.813302 0.0322292 0 1.96997 0.0445942 +PiPi30[kMatrix.prod.1]{pi+,pi-} 0 1.81891 0.0592467 0 -1.41006 0.0388266 +a(1)(1260)+[D;GSpline.EFF]{rho(770)0{pi+,pi-},pi+} 0 0.582157 0.0110043 0 -2.66737 0.0208508 +a(1)(1260)+[GSpline.EFF]{PiPi20,pi+} 2 1 0 2 0 0 +a(1)(1260)+[GSpline.EFF]{rho(770)0{pi+,pi-},pi+} 2 1 0 2 0 0 +D0_radius 2 0.0037559 0 +IS_p1_4pi 2 0 0 +IS_p1_EtaEta 2 -0.39899 0 +IS_p1_EtapEta 2 -0.34639 0 +IS_p1_KK 2 -0.55377 0 +IS_p1_mass 2 0.651 0 +IS_p1_pipi 2 0.22889 0 +IS_p2_4pi 2 0 0 +IS_p2_EtaEta 2 0.39065 0 +IS_p2_EtapEta 2 0.31503 0 +IS_p2_KK 2 0.55095 0 +IS_p2_mass 2 1.2036 0 +IS_p2_pipi 2 0.94128 0 +IS_p3_4pi 2 0.55639 0 +IS_p3_EtaEta 2 0.1834 0 +IS_p3_EtapEta 2 0.18681 0 +IS_p3_KK 2 0.23888 0 +IS_p3_mass 2 1.55817 0 +IS_p3_pipi 2 0.36856 0 +IS_p4_4pi 2 0.85679 0 +IS_p4_EtaEta 2 0.19906 0 +IS_p4_EtapEta 2 -0.00984 0 +IS_p4_KK 2 0.40907 0 +IS_p4_mass 2 1.21 0 +IS_p4_pipi 2 0.3365 0 +IS_p5_4pi 2 -0.79658 0 +IS_p5_EtaEta 2 -0.00355 0 +IS_p5_EtapEta 2 0.22358 0 +IS_p5_KK 2 -0.17558 0 +IS_p5_mass 2 1.82206 0 +IS_p5_pipi 2 0.18171 0 +K(1)(1270)bar-::Spline::Gamma::0 2 6.62044e-09 0 +K(1)(1270)bar-::Spline::Gamma::1 2 8.1034e-05 0 +K(1)(1270)bar-::Spline::Gamma::10 2 0.142193 0 
+K(1)(1270)bar-::Spline::Gamma::11 2 0.182565 0 +K(1)(1270)bar-::Spline::Gamma::12 2 0.231309 0 +K(1)(1270)bar-::Spline::Gamma::13 2 0.295139 0 +K(1)(1270)bar-::Spline::Gamma::14 2 0.383415 0 +K(1)(1270)bar-::Spline::Gamma::15 2 0.507206 0 +K(1)(1270)bar-::Spline::Gamma::16 2 0.673751 0 +K(1)(1270)bar-::Spline::Gamma::17 2 0.918665 0 +K(1)(1270)bar-::Spline::Gamma::18 2 1.18142 0 +K(1)(1270)bar-::Spline::Gamma::19 2 1.41125 0 +K(1)(1270)bar-::Spline::Gamma::2 2 0.000601093 0 +K(1)(1270)bar-::Spline::Gamma::20 2 1.61709 0 +K(1)(1270)bar-::Spline::Gamma::21 2 1.80236 0 +K(1)(1270)bar-::Spline::Gamma::22 2 1.97044 0 +K(1)(1270)bar-::Spline::Gamma::23 2 2.12449 0 +K(1)(1270)bar-::Spline::Gamma::24 2 2.26729 0 +K(1)(1270)bar-::Spline::Gamma::25 2 2.40124 0 +K(1)(1270)bar-::Spline::Gamma::26 2 2.52841 0 +K(1)(1270)bar-::Spline::Gamma::27 2 2.65063 0 +K(1)(1270)bar-::Spline::Gamma::28 2 2.76952 0 +K(1)(1270)bar-::Spline::Gamma::29 2 2.88658 0 +K(1)(1270)bar-::Spline::Gamma::3 2 0.00199416 0 +K(1)(1270)bar-::Spline::Gamma::30 2 3.0032 0 +K(1)(1270)bar-::Spline::Gamma::31 2 3.12073 0 +K(1)(1270)bar-::Spline::Gamma::32 2 3.24046 0 +K(1)(1270)bar-::Spline::Gamma::33 2 3.36373 0 +K(1)(1270)bar-::Spline::Gamma::34 2 3.49188 0 +K(1)(1270)bar-::Spline::Gamma::35 2 3.62634 0 +K(1)(1270)bar-::Spline::Gamma::36 2 3.76865 0 +K(1)(1270)bar-::Spline::Gamma::37 2 3.92048 0 +K(1)(1270)bar-::Spline::Gamma::38 2 4.08365 0 +K(1)(1270)bar-::Spline::Gamma::39 2 4.26015 0 +K(1)(1270)bar-::Spline::Gamma::4 2 0.00478532 0 +K(1)(1270)bar-::Spline::Gamma::5 2 0.0097556 0 +K(1)(1270)bar-::Spline::Gamma::6 2 0.0184164 0 +K(1)(1270)bar-::Spline::Gamma::7 2 0.0348721 0 +K(1)(1270)bar-::Spline::Gamma::8 2 0.0672518 0 +K(1)(1270)bar-::Spline::Gamma::9 2 0.105115 0 +K(1)(1270)bar-_mass 0 1289.81 0.557988 +K(1)(1270)bar-_radius 2 0.0017 0 +K(1)(1270)bar-_width 0 116.114 1.6492 +K(1460)bar-::Spline::Gamma::0 2 8.29869e-06 0 +K(1460)bar-::Spline::Gamma::1 2 0.00482634 0 +K(1460)bar-::Spline::Gamma::10 2 0.162536 0 +K(1460)bar-::Spline::Gamma::11 2 0.195329 0 +K(1460)bar-::Spline::Gamma::12 2 0.232727 0 +K(1460)bar-::Spline::Gamma::13 2 0.273932 0 +K(1460)bar-::Spline::Gamma::14 2 0.318217 0 +K(1460)bar-::Spline::Gamma::15 2 0.364931 0 +K(1460)bar-::Spline::Gamma::16 2 0.413518 0 +K(1460)bar-::Spline::Gamma::17 2 0.463526 0 +K(1460)bar-::Spline::Gamma::18 2 0.514629 0 +K(1460)bar-::Spline::Gamma::19 2 0.566657 0 +K(1460)bar-::Spline::Gamma::2 2 0.0176071 0 +K(1460)bar-::Spline::Gamma::20 2 0.619642 0 +K(1460)bar-::Spline::Gamma::21 2 0.67389 0 +K(1460)bar-::Spline::Gamma::22 2 0.730087 0 +K(1460)bar-::Spline::Gamma::23 2 0.789463 0 +K(1460)bar-::Spline::Gamma::24 2 0.853918 0 +K(1460)bar-::Spline::Gamma::25 2 0.925529 0 +K(1460)bar-::Spline::Gamma::26 2 0.999944 0 +K(1460)bar-::Spline::Gamma::27 2 1.05939 0 +K(1460)bar-::Spline::Gamma::28 2 1.11703 0 +K(1460)bar-::Spline::Gamma::29 2 1.17704 0 +K(1460)bar-::Spline::Gamma::3 2 0.0349086 0 +K(1460)bar-::Spline::Gamma::30 2 1.2408 0 +K(1460)bar-::Spline::Gamma::31 2 1.30884 0 +K(1460)bar-::Spline::Gamma::32 2 1.37941 0 +K(1460)bar-::Spline::Gamma::33 2 1.4487 0 +K(1460)bar-::Spline::Gamma::34 2 1.51729 0 +K(1460)bar-::Spline::Gamma::35 2 1.58528 0 +K(1460)bar-::Spline::Gamma::36 2 1.6526 0 +K(1460)bar-::Spline::Gamma::37 2 1.71901 0 +K(1460)bar-::Spline::Gamma::38 2 1.78418 0 +K(1460)bar-::Spline::Gamma::39 2 1.8476 0 +K(1460)bar-::Spline::Gamma::4 2 0.0532389 0 +K(1460)bar-::Spline::Gamma::5 2 0.0703992 0 +K(1460)bar-::Spline::Gamma::6 2 0.0855373 0 +K(1460)bar-::Spline::Gamma::7 2 
0.0991271 0 +K(1460)bar-::Spline::Gamma::8 2 0.114209 0 +K(1460)bar-::Spline::Gamma::9 2 0.135205 0 +K(1460)bar-_mass 0 1482.4 3.57585 +K(1460)bar-_radius 2 0.0017 0 +K(1460)bar-_width 0 335.595 6.19588 +PiPi00_s0_prod 2 -0.196872 0 +PiPi10_s0_prod 2 -0.950027 0 +PiPi20_s0_prod 2 -0.165753 0 +PiPi30_s0_prod 2 -0.0676736 0 +a(1)(1260)+::Spline::Gamma::0 2 1.23936e-06 0 +a(1)(1260)+::Spline::Gamma::1 2 0.000223871 0 +a(1)(1260)+::Spline::Gamma::10 2 0.0832521 0 +a(1)(1260)+::Spline::Gamma::11 2 0.115406 0 +a(1)(1260)+::Spline::Gamma::12 2 0.159329 0 +a(1)(1260)+::Spline::Gamma::13 2 0.218726 0 +a(1)(1260)+::Spline::Gamma::14 2 0.295241 0 +a(1)(1260)+::Spline::Gamma::15 2 0.384295 0 +a(1)(1260)+::Spline::Gamma::16 2 0.475641 0 +a(1)(1260)+::Spline::Gamma::17 2 0.560491 0 +a(1)(1260)+::Spline::Gamma::18 2 0.635169 0 +a(1)(1260)+::Spline::Gamma::19 2 0.699435 0 +a(1)(1260)+::Spline::Gamma::2 2 0.00119329 0 +a(1)(1260)+::Spline::Gamma::20 2 0.754352 0 +a(1)(1260)+::Spline::Gamma::21 2 0.801255 0 +a(1)(1260)+::Spline::Gamma::22 2 0.841402 0 +a(1)(1260)+::Spline::Gamma::23 2 0.875894 0 +a(1)(1260)+::Spline::Gamma::24 2 0.905694 0 +a(1)(1260)+::Spline::Gamma::25 2 0.931693 0 +a(1)(1260)+::Spline::Gamma::26 2 0.954723 0 +a(1)(1260)+::Spline::Gamma::27 2 0.97539 0 +a(1)(1260)+::Spline::Gamma::28 2 0.994175 0 +a(1)(1260)+::Spline::Gamma::29 2 1.01148 0 +a(1)(1260)+::Spline::Gamma::3 2 0.00326416 0 +a(1)(1260)+::Spline::Gamma::30 2 1.02765 0 +a(1)(1260)+::Spline::Gamma::31 2 1.04297 0 +a(1)(1260)+::Spline::Gamma::32 2 1.05768 0 +a(1)(1260)+::Spline::Gamma::33 2 1.07198 0 +a(1)(1260)+::Spline::Gamma::34 2 1.08602 0 +a(1)(1260)+::Spline::Gamma::35 2 1.09995 0 +a(1)(1260)+::Spline::Gamma::36 2 1.11387 0 +a(1)(1260)+::Spline::Gamma::37 2 1.12789 0 +a(1)(1260)+::Spline::Gamma::38 2 1.14208 0 +a(1)(1260)+::Spline::Gamma::39 2 1.15651 0 +a(1)(1260)+::Spline::Gamma::4 2 0.00671647 0 +a(1)(1260)+::Spline::Gamma::5 2 0.0118496 0 +a(1)(1260)+::Spline::Gamma::6 2 0.0190462 0 +a(1)(1260)+::Spline::Gamma::7 2 0.0288353 0 +a(1)(1260)+::Spline::Gamma::8 2 0.0419745 0 +a(1)(1260)+::Spline::Gamma::9 2 0.0595699 0 +a(1)(1260)+_mass 0 1195.05 1.04514 +a(1)(1260)+_radius 2 0.0017 0 +a(1)(1260)+_width 0 422.013 2.0958 +f_scatt0 2 0.23399 0 +f_scatt1 2 0.15044 0 +f_scatt2 2 -0.20545 0 +f_scatt3 2 0.32825 0 +f_scatt4 2 0.35412 0 +s0_prod 2 -1 0 +s0_scatt 2 -3.92637 0 +sA 2 1 0 diff --git a/notebooks/DtoKpipipi_v2.cu b/notebooks/DtoKpipipi_v2.cu index 39f6e6c4..042ce4be 100644 --- a/notebooks/DtoKpipipi_v2.cu +++ b/notebooks/DtoKpipipi_v2.cu @@ -48,11 +48,11 @@ DtoV1S2_V1toP1P2_S2toP3P4 : SF_4Body.DtoVS_VtoP1P2_StoP3P4 SF_4Body.FF_12_34_L1 D0{rho(770)0{pi+,pi-},KPi10[FOCUS.Kpi]{K-,pi+}} - Scalar: KPi20 PiPi30 PiPi00 PiPi10 KPi10 PiPi20 KPi00 -PseudoScalar: D0 pi- pi+ K(1460)~- K- - Vector: omega(782)0 K(892)~0 rho(770)0 rho(1450)0 + Scalar: KPi20 PiPi30 PiPi00 PiPi10 KPi10 PiPi20 KPi00 +PseudoScalar: D0 pi- pi+ K(1460)~- K- + Vector: omega(782)0 K(892)~0 rho(770)0 rho(1450)0 Axial: K(1)(1270)~- a(1)(1260)+ K(1)(1400)~- - Tensor: K(2)~- + Tensor: K(2)~- PseudoTensor: Unknown: @@ -1535,5 +1535,3 @@ ONE 2}); DK3P_DI.amplitudes_B.push_back(amplitudes_list.back()); - - diff --git a/notebooks/simple_model.txt b/notebooks/simple_model.txt index df8a8b35..3623896f 100644 --- a/notebooks/simple_model.txt +++ b/notebooks/simple_model.txt @@ -1,8 +1,8 @@ EventType D0 K- pi+ pi+ pi- -D0[D]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 2 1 0 2 0 0 -D0[D]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.648936 0.0205762 0 -0.271637 0.0342107 
-D0[P]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 0 0.362058 0.00237314 0 -1.79607 0.00663691 -D0[P]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.642781 0.00570074 0 1.69828 0.00900026 - -D0_radius 2 0.0037559 0 +D0[D]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 2 1 0 2 0 0 +D0[D]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.648936 0.0205762 0 -0.271637 0.0342107 +D0[P]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 0 0.362058 0.00237314 0 -1.79607 0.00663691 +D0[P]{rho(1450)0{pi+,pi-},K*(892)bar0{K-,pi+}} 0 0.642781 0.00570074 0 1.69828 0.00900026 + +D0_radius 2 0.0037559 0 diff --git a/setup.py b/setup.py index af84993e..31dbdd60 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# -*- encoding: utf-8 -*- +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -19,80 +19,74 @@ PYTHON_REQUIRES = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" INSTALL_REQUIRES = [ - 'attrs>=19.2', - 'plumbum>=1.6.9', - 'numpy>=1.12', - 'pandas>=0.22', - 'six>=1.11', - 'lark-parser>=0.8.0, <0.8.6', + "attrs>=19.2", + "plumbum>=1.6.9", + "numpy>=1.12", + "pandas>=0.22", + "six>=1.11", + "lark-parser>=0.8.0, <0.8.6", 'pathlib2>=2.3; python_version<"3.5"', 'enum34>=1.1; python_version<"3.4"', 'importlib_resources>=1.0; python_version<"3.7"', 'cachetools; python_version<"3.3"', - 'particle==0.13.*', - 'pydot' + "particle==0.14.*", + "pydot", ] + def read(*names, **kwargs): return io.open( os.path.join(os.path.dirname(__file__), *names), - encoding=kwargs.get('encoding', 'utf8') + encoding=kwargs.get("encoding", "utf8"), ).read() -def proc_readme(text): - text = text.split('')[-1] - return ''' -
- - ''' + text def get_version(): g = {} exec(open(os.path.join("decaylanguage", "_version.py")).read(), g) return g["__version__"] + extras = { - 'test': ['pytest'], - 'notebook': ['graphviz'], + "test": ["pytest"], + "notebook": ["graphviz"], } setup( - name = 'DecayLanguage', - author = 'Henry Fredrick Schreiner III, Eduardo Rodrigues', - author_email = 'henry.schreiner@cern.ch, eduardo.rodrigues@cern.ch', - maintainer = 'The Scikit-HEP admins', - maintainer_email = 'scikit-hep-admins@googlegroups.com', - version = get_version(), - license = 'BSD 3-Clause License', - description = 'A language to describe particle decays, and tools to work with them.', - long_description = proc_readme(read('README.md')) + '\n\n' + read('CHANGELOG.md'), - long_description_content_type = "text/markdown", - url = 'https://github.com/scikit-hep/decaylanguage', - packages = find_packages(exclude=("tests",)), - package_data = {'': ['data/*.*']}, - python_requires = PYTHON_REQUIRES, - install_requires = INSTALL_REQUIRES, - tests_require = extras['test'], - extras_require = extras, - keywords = [ - 'HEP', 'particle', 'decay', 'representation' - ], - classifiers = [ + name="DecayLanguage", + author="Henry Fredrick Schreiner III, Eduardo Rodrigues", + author_email="henry.schreiner@cern.ch, eduardo.rodrigues@cern.ch", + maintainer="The Scikit-HEP admins", + maintainer_email="scikit-hep-admins@googlegroups.com", + version=get_version(), + license="BSD 3-Clause License", + description="A language to describe particle decays, and tools to work with them.", + long_description=read("README.md"), + long_description_content_type="text/markdown", + url="https://github.com/scikit-hep/decaylanguage", + packages=find_packages(exclude=("tests",)), + package_data={"": ["data/*.*"]}, + python_requires=PYTHON_REQUIRES, + install_requires=INSTALL_REQUIRES, + tests_require=extras["test"], + extras_require=extras, + keywords=["HEP", "particle", "decay", "representation"], + classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Scientific/Engineering', + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering", ], - platforms = "Any", + platforms="Any", ) diff --git a/tests/dec/test_dec.py b/tests/dec/test_dec.py index 94cea7bd..f9dcb8e2 100644 --- a/tests/dec/test_dec.py +++ b/tests/dec/test_dec.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. 
# # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -39,16 +40,18 @@ def test_default_constructor(): p = DecFileParser() assert p is not None + def test_constructor_1_file(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") assert p is not None assert len(p._dec_file_names) == 1 def test_constructor_multiple_files(): - p = DecFileParser(DIR / '../data/test_Xicc2XicPiPi.dec', - DIR / '../data/test_Bc2BsPi_Bs2KK.dec') + p = DecFileParser( + DIR / "../data/test_Xicc2XicPiPi.dec", DIR / "../data/test_Bc2BsPi_Bs2KK.dec" + ) p.parse() assert len(p._dec_file_names) == 2 @@ -72,133 +75,146 @@ def test_from_string(): def test_unknown_decfile(): with pytest.raises(FileNotFoundError): - p = DecFileParser('non-existent.dec') + p = DecFileParser("non-existent.dec") def test_non_parsed_decfile(): with pytest.raises(DecFileNotParsed): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.list_decay_mother_names() def test_non_existent_decay(): with pytest.raises(DecayNotFound): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - p.list_decay_modes('XYZ') + p.list_decay_modes("XYZ") def test_default_grammar_loading(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") assert p.grammar is not None def test_explicit_grammar_loading(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') - p.load_grammar(DIR / '../../decaylanguage/data/decfile.lark') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") + p.load_grammar(DIR / "../../decaylanguage/data/decfile.lark") assert p.grammar_loaded is True def test_string_representation(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") - assert "n_decays" not in p.__str__() + assert "n_decays" not in p.__str__() - p.parse() - assert "n_decays=5" in p.__str__() + p.parse() + assert "n_decays=5" in p.__str__() def test_copydecay_statement_parsing(): - p = DecFileParser(DIR / '../data/test_CopyDecay_RemoveDecay.dec') + p = DecFileParser(DIR / "../data/test_CopyDecay_RemoveDecay.dec") p.parse() assert len(p.dict_decays2copy()) == 2 assert p.number_of_decays == 4 # 2 original + 2 copied - assert p.list_decay_modes('phi_copy') == p.list_decay_modes('phi') + assert p.list_decay_modes("phi_copy") == p.list_decay_modes("phi") def test_definitions_parsing(): - p = DecFileParser(DIR / '../data/defs-aliases-chargeconj.dec') + p = DecFileParser(DIR / "../data/defs-aliases-chargeconj.dec") p.parse() assert len(p.dict_definitions()) == 24 def test_aliases_parsing(): - p = DecFileParser(DIR / '../data/defs-aliases-chargeconj.dec') + p = DecFileParser(DIR / "../data/defs-aliases-chargeconj.dec") p.parse() assert len(p.dict_aliases()) == 132 def test_charge_conjugates_parsing(): - p = DecFileParser(DIR / '../data/defs-aliases-chargeconj.dec') + p = DecFileParser(DIR / "../data/defs-aliases-chargeconj.dec") p.parse() assert len(p.dict_charge_conjugates()) == 77 def test_pythia_definitions_parsing(): - p = DecFileParser(DIR / '../data/defs-aliases-chargeconj.dec') + p = DecFileParser(DIR / "../data/defs-aliases-chargeconj.dec") p.parse() - assert p.dict_pythia_definitions() == {'ParticleDecays:mixB': 'off', - 'Init:showChangedSettings': 'off', - 'Init:showChangedParticleData': 'off', - 'Next:numberShowEvent': 0.0} 
+ assert p.dict_pythia_definitions() == { + "ParticleDecays:mixB": "off", + "Init:showChangedSettings": "off", + "Init:showChangedParticleData": "off", + "Next:numberShowEvent": 0.0, + } def test_jetset_definitions_parsing(): - p = DecFileParser(DIR / '../data/defs-aliases-chargeconj.dec') + p = DecFileParser(DIR / "../data/defs-aliases-chargeconj.dec") p.parse() - assert p.dict_jetset_definitions() == {'MSTU': {1: 0, 2: 0}, - 'PARU': {11: 0.001}, - 'MSTJ': {26: 0}, - 'PARJ': {21: 0.36}} + assert p.dict_jetset_definitions() == { + "MSTU": {1: 0, 2: 0}, + "PARU": {11: 0.001}, + "MSTJ": {26: 0}, + "PARJ": {21: 0.36}, + } def test_list_lineshape_definitions(): - p = DecFileParser(DIR / '../data/defs-aliases-chargeconj.dec') + p = DecFileParser(DIR / "../data/defs-aliases-chargeconj.dec") p.parse() - assert p.list_lineshape_definitions() == [(['D_1+', 'D*+', 'pi0'], 2), - (['D_1+', 'D*0', 'pi+'], 2), - (['D_1-', 'D*-', 'pi0'], 2), - (['D_1-', 'anti-D*0', 'pi-'], 2), - (['D_10', 'D*0', 'pi0'], 2), - (['D_10', 'D*+', 'pi-'], 2), - (['anti-D_10', 'anti-D*0', 'pi0'], 2), - (['anti-D_10', 'D*-', 'pi+'], 2)] + assert p.list_lineshape_definitions() == [ + (["D_1+", "D*+", "pi0"], 2), + (["D_1+", "D*0", "pi+"], 2), + (["D_1-", "D*-", "pi0"], 2), + (["D_1-", "anti-D*0", "pi-"], 2), + (["D_10", "D*0", "pi0"], 2), + (["D_10", "D*+", "pi-"], 2), + (["anti-D_10", "anti-D*0", "pi0"], 2), + (["anti-D_10", "D*-", "pi+"], 2), + ] def test_global_photos_flag(): - p = DecFileParser(DIR / '../data/defs-aliases-chargeconj.dec') + p = DecFileParser(DIR / "../data/defs-aliases-chargeconj.dec") p.parse() assert p.global_photos_flag() == True + def test_missing_global_photos_flag(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() assert p.global_photos_flag() == False + def test_list_charge_conjugate_decays(): - p = DecFileParser(DIR / '../data/test_Bd2DmTauNu_Dm23PiPi0_Tau2MuNu.dec') + p = DecFileParser(DIR / "../data/test_Bd2DmTauNu_Dm23PiPi0_Tau2MuNu.dec") p.parse() - assert p.list_charge_conjugate_decays() == ['MyD+', 'MyTau+', 'Mya_1-', 'anti-B0sig'] + assert p.list_charge_conjugate_decays() == [ + "MyD+", + "MyTau+", + "Mya_1-", + "anti-B0sig", + ] def test_simple_dec(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - assert p.list_decay_mother_names() == ['D*+', 'D*-', 'D0', 'D+', 'pi0'] + assert p.list_decay_mother_names() == ["D*+", "D*-", "D0", "D+", "pi0"] - assert p.list_decay_modes('D0') == [['K-', 'pi+']] + assert p.list_decay_modes("D0") == [["K-", "pi+"]] def test_with_missing_info(): @@ -213,23 +229,23 @@ def test_with_missing_info(): warnings.warn(msg) `` """ - p = DecFileParser(DIR / '../data/test_Xicc2XicPiPi.dec') + p = DecFileParser(DIR / "../data/test_Xicc2XicPiPi.dec") p.parse() # Decay of anti-Xi_cc-sig missing assert p.number_of_decays == 3 - assert 'anti-Xi_cc-sig' not in p.list_decay_mother_names() + assert "anti-Xi_cc-sig" not in p.list_decay_mother_names() # CDecay statements - assert 'anti-Xi_cc-sig' in p.list_charge_conjugate_decays() + assert "anti-Xi_cc-sig" in p.list_charge_conjugate_decays() def test_decay_mode_details(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - tree_Dp = p._find_decay_modes('D+')[0] - output = (1.0, ['K-', 'pi+', 'pi+', 'pi0'], 'PHSP', '') + tree_Dp = p._find_decay_modes("D+")[0] + output = (1.0, ["K-", "pi+", "pi+", "pi0"], 
"PHSP", "") assert p._decay_mode_details(tree_Dp) == output @@ -238,24 +254,24 @@ def test_decay_model_parsing(): This module tests building blocks rather than the API, hence the "strange" way to access parsed Lark Tree instances. """ - p = DecFileParser(DIR / '../data/test_Bd2DstDst.dec') + p = DecFileParser(DIR / "../data/test_Bd2DstDst.dec") p.parse() # Simple decay model without model parameters dl = p._parsed_decays[2].children[1] # 'MySecondD*+' Tree - assert get_model_name(dl) == 'VSS' - assert get_model_parameters(dl) == '' + assert get_model_name(dl) == "VSS" + assert get_model_parameters(dl) == "" # Decay model with a set of floating-point model parameters dl = p._parsed_decays[0].children[1] # 'B0sig' Tree - assert get_model_name(dl) == 'SVV_HELAMP' + assert get_model_name(dl) == "SVV_HELAMP" assert get_model_parameters(dl) == [0.0, 0.0, 0.0, 0.0, 1.0, 0.0] # Decay model where model parameter is a string, # which matches an XML file for EvtGen dl = p._parsed_decays[4].children[1] # 'MyD0' Tree - assert get_model_name(dl) == 'LbAmpGen' - assert get_model_parameters(dl) == ['DtoKpipipi_v1'] + assert get_model_name(dl) == "LbAmpGen" + assert get_model_parameters(dl) == ["DtoKpipipi_v1"] def test_decay_model_parsing_with_variable_defs(): @@ -265,31 +281,39 @@ def test_decay_model_parsing_with_variable_defs(): 'Define dm 0.507e12'. The parser should recognise this and return [0.507e12] rather than ['dm'] as model parameters. """ - p = DecFileParser(DIR / '../data/test_Upsilon4S2B0B0bar.dec') + p = DecFileParser(DIR / "../data/test_Upsilon4S2B0B0bar.dec") p.parse() - assert p.dict_definitions() == {'dm': 507000000000.0} + assert p.dict_definitions() == {"dm": 507000000000.0} dl = p._parsed_decays[0].children[1] - assert get_model_name(dl) == 'VSS_BMIX' + assert get_model_name(dl) == "VSS_BMIX" assert get_model_parameters(dl) == [0.507e12] def test_duplicate_decay_definitions(): - p = DecFileParser(DIR / '../data/duplicate-decays.dec') + p = DecFileParser(DIR / "../data/duplicate-decays.dec") p.parse() assert p.number_of_decays == 2 - assert p.list_decay_mother_names() == ['Sigma(1775)0', 'anti-Sigma(1775)0'] + assert p.list_decay_mother_names() == ["Sigma(1775)0", "anti-Sigma(1775)0"] def test_list_decay_modes(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - assert p.list_decay_modes('D*-') == [['anti-D0', 'pi-'], ['D-', 'pi0'], ['D-', 'gamma']] - assert p.list_decay_modes('D*(2010)-', pdg_name=True) == [['anti-D0', 'pi-'], ['D-', 'pi0'], ['D-', 'gamma']] + assert p.list_decay_modes("D*-") == [ + ["anti-D0", "pi-"], + ["D-", "pi0"], + ["D-", "gamma"], + ] + assert p.list_decay_modes("D*(2010)-", pdg_name=True) == [ + ["anti-D0", "pi-"], + ["D-", "pi0"], + ["D-", "gamma"], + ] def test_list_decay_modes_on_the_fly(): @@ -297,93 +321,146 @@ def test_list_decay_modes_on_the_fly(): Unlike in the example above the charge conjugate decay modes are created on the fly from the non-CC. decay. 
""" - p = DecFileParser(DIR / '../data/test_Xicc2XicPiPi.dec') + p = DecFileParser(DIR / "../data/test_Xicc2XicPiPi.dec") p.parse() # Parsed directly from the dec file - assert p.list_decay_modes('MyXic+') == [['p+', 'K-', 'pi+']] + assert p.list_decay_modes("MyXic+") == [["p+", "K-", "pi+"]] # Decay mode created on-the-fly from the above - assert p.list_decay_modes('MyantiXic-') == [['anti-p-', 'K+', 'pi-']] + assert p.list_decay_modes("MyantiXic-") == [["anti-p-", "K+", "pi-"]] def test_print_decay_modes(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() with pytest.raises(DecayNotFound): - p.print_decay_modes('D*(2010)-') + p.print_decay_modes("D*(2010)-") - p.print_decay_modes('D*(2010)-', pdg_name=True) + p.print_decay_modes("D*(2010)-", pdg_name=True) def test_build_decay_chains(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - output = {'D+': [{'bf': 1.0, 'fs': ['K-', 'pi+', 'pi+', 'pi0'], 'model': 'PHSP', 'model_params': ''}]} - assert p.build_decay_chains('D+', stable_particles=['pi0']) == output + output = { + "D+": [ + { + "bf": 1.0, + "fs": ["K-", "pi+", "pi+", "pi0"], + "model": "PHSP", + "model_params": "", + } + ] + } + assert p.build_decay_chains("D+", stable_particles=["pi0"]) == output def test_Lark_DecayModelParamValueReplacement_Visitor_no_params(): - t = Tree('decay', [Tree('particle', [Token('LABEL', 'D0')]), - Tree('decayline', [Tree('value', [Token('SIGNED_NUMBER', '1.0')]), - Tree('particle', [Token('LABEL', 'K-')]), - Tree('particle', [Token('LABEL', 'pi+')]), - Tree('model', [Token('MODEL_NAME', 'PHSP')])])]) + t = Tree( + "decay", + [ + Tree("particle", [Token("LABEL", "D0")]), + Tree( + "decayline", + [ + Tree("value", [Token("SIGNED_NUMBER", "1.0")]), + Tree("particle", [Token("LABEL", "K-")]), + Tree("particle", [Token("LABEL", "pi+")]), + Tree("model", [Token("MODEL_NAME", "PHSP")]), + ], + ), + ], + ) DecayModelParamValueReplacement().visit(t) # The visitor should do nothing in this case - tree_decayline = list(t.find_data('decayline'))[0] - assert get_model_name(tree_decayline) == 'PHSP' - assert get_model_parameters(tree_decayline) == '' + tree_decayline = list(t.find_data("decayline"))[0] + assert get_model_name(tree_decayline) == "PHSP" + assert get_model_parameters(tree_decayline) == "" def test_Lark_DecayModelParamValueReplacement_Visitor_single_value(): - t = Tree('decay', [Tree('particle', [Token('LABEL', 'Upsilon(4S)')]), - Tree('decayline', [Tree('value', [Token('SIGNED_NUMBER', '1.0')]), - Tree('particle', [Token('LABEL', 'B0')]), - Tree('particle', [Token('LABEL', 'anti-B0')]), - Tree('model', [Token('MODEL_NAME', 'VSS_BMIX'), - Tree('model_options', [Token('LABEL', 'dm')])])])]) + t = Tree( + "decay", + [ + Tree("particle", [Token("LABEL", "Upsilon(4S)")]), + Tree( + "decayline", + [ + Tree("value", [Token("SIGNED_NUMBER", "1.0")]), + Tree("particle", [Token("LABEL", "B0")]), + Tree("particle", [Token("LABEL", "anti-B0")]), + Tree( + "model", + [ + Token("MODEL_NAME", "VSS_BMIX"), + Tree("model_options", [Token("LABEL", "dm")]), + ], + ), + ], + ), + ], + ) DecayModelParamValueReplacement().visit(t) # Nothing done since model parameter name has no corresponding # 'Define' statement from which the actual value can be inferred - tree_decayline = list(t.find_data('decayline'))[0] - assert get_model_name(tree_decayline) == 'VSS_BMIX' - assert get_model_parameters(tree_decayline) == ['dm'] + 
tree_decayline = list(t.find_data("decayline"))[0] + assert get_model_name(tree_decayline) == "VSS_BMIX" + assert get_model_parameters(tree_decayline) == ["dm"] - dict_define_defs = {'dm': 0.507e12} + dict_define_defs = {"dm": 0.507e12} DecayModelParamValueReplacement(define_defs=dict_define_defs).visit(t) # The model parameter 'dm' should now be replaced by its value - assert get_model_name(tree_decayline) == 'VSS_BMIX' + assert get_model_name(tree_decayline) == "VSS_BMIX" assert get_model_parameters(tree_decayline) == [507000000000.0] def test_Lark_DecayModelParamValueReplacement_Visitor_list(): - t = Tree('decay', [Tree('particle', [Token('LABEL', 'B0sig')]), - Tree('decayline', [Tree('value', [Token('SIGNED_NUMBER', '1.000')]), - Tree('particle', [Token('LABEL', 'MyFirstD*-')]), - Tree('particle', [Token('LABEL', 'MySecondD*+')]), - Tree('model', [Token('MODEL_NAME', 'SVV_HELAMP'), - Tree('model_options', - [Tree('value', [Token('SIGNED_NUMBER', '0.0')]), - Tree('value', [Token('SIGNED_NUMBER', '0.0')]), - Tree('value', [Token('SIGNED_NUMBER', '0.0')]), - Tree('value', [Token('SIGNED_NUMBER', '0.0')]), - Tree('value', [Token('SIGNED_NUMBER', '1.0')]), - Tree('value', [Token('SIGNED_NUMBER', '0.0')])])])])]) + t = Tree( + "decay", + [ + Tree("particle", [Token("LABEL", "B0sig")]), + Tree( + "decayline", + [ + Tree("value", [Token("SIGNED_NUMBER", "1.000")]), + Tree("particle", [Token("LABEL", "MyFirstD*-")]), + Tree("particle", [Token("LABEL", "MySecondD*+")]), + Tree( + "model", + [ + Token("MODEL_NAME", "SVV_HELAMP"), + Tree( + "model_options", + [ + Tree("value", [Token("SIGNED_NUMBER", "0.0")]), + Tree("value", [Token("SIGNED_NUMBER", "0.0")]), + Tree("value", [Token("SIGNED_NUMBER", "0.0")]), + Tree("value", [Token("SIGNED_NUMBER", "0.0")]), + Tree("value", [Token("SIGNED_NUMBER", "1.0")]), + Tree("value", [Token("SIGNED_NUMBER", "0.0")]), + ], + ), + ], + ), + ], + ), + ], + ) DecayModelParamValueReplacement().visit(t) # The visitor should do nothing in this case - tree_decayline = list(t.find_data('decayline'))[0] - assert get_model_name(tree_decayline) == 'SVV_HELAMP' + tree_decayline = list(t.find_data("decayline"))[0] + assert get_model_name(tree_decayline) == "SVV_HELAMP" assert get_model_parameters(tree_decayline) == [0.0, 0.0, 0.0, 0.0, 1.0, 0.0] @@ -393,16 +470,26 @@ def test_Lark_ChargeConjugateReplacement_Visitor(): of a Lark's Visitor, here replacing all particles in a 'decay' Tree by their antiparticles. 
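    Here the 'D0 -> K- pi+' decay Tree built below becomes 'anti-D0 -> K+ pi-'
    once visited.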
""" - t = Tree('decay', [Tree('particle', [Token('LABEL', 'D0')]), - Tree('decayline', [Tree('value', [Token('SIGNED_NUMBER', '1.0')]), - Tree('particle', [Token('LABEL', 'K-')]), - Tree('particle', [Token('LABEL', 'pi+')]), - Tree('model', [Token('MODEL_NAME', 'PHSP')])])]) + t = Tree( + "decay", + [ + Tree("particle", [Token("LABEL", "D0")]), + Tree( + "decayline", + [ + Tree("value", [Token("SIGNED_NUMBER", "1.0")]), + Tree("particle", [Token("LABEL", "K-")]), + Tree("particle", [Token("LABEL", "pi+")]), + Tree("model", [Token("MODEL_NAME", "PHSP")]), + ], + ), + ], + ) ChargeConjugateReplacement().visit(t) - assert get_decay_mother_name(t) == 'anti-D0' - assert get_final_state_particle_names(t.children[1]) == ['K+', 'pi-'] + assert get_decay_mother_name(t) == "anti-D0" + assert get_final_state_particle_names(t.children[1]) == ["K+", "pi-"] def test_Lark_ChargeConjugateReplacement_Visitor_with_aliases(): @@ -415,18 +502,28 @@ def test_Lark_ChargeConjugateReplacement_Visitor_with_aliases(): ChargeConj MyD0 MyAnti-D0 A dictionary of matches should be passed to the Lark Visitor instance. """ - t = Tree('decay', [Tree('particle', [Token('LABEL', 'MyD0')]), - Tree('decayline', [Tree('value', [Token('SIGNED_NUMBER', '1.0')]), - Tree('particle', [Token('LABEL', 'K-')]), - Tree('particle', [Token('LABEL', 'pi+')]), - Tree('model', [Token('MODEL_NAME', 'PHSP')])])]) - - dict_ChargeConj_defs = {'MyD0': 'MyAnti-D0'} + t = Tree( + "decay", + [ + Tree("particle", [Token("LABEL", "MyD0")]), + Tree( + "decayline", + [ + Tree("value", [Token("SIGNED_NUMBER", "1.0")]), + Tree("particle", [Token("LABEL", "K-")]), + Tree("particle", [Token("LABEL", "pi+")]), + Tree("model", [Token("MODEL_NAME", "PHSP")]), + ], + ), + ], + ) + + dict_ChargeConj_defs = {"MyD0": "MyAnti-D0"} ChargeConjugateReplacement(charge_conj_defs=dict_ChargeConj_defs).visit(t) - assert get_decay_mother_name(t) == 'MyAnti-D0' - assert get_final_state_particle_names(t.children[1]) == ['K+', 'pi-'] + assert get_decay_mother_name(t) == "MyAnti-D0" + assert get_final_state_particle_names(t.children[1]) == ["K+", "pi-"] def test_creation_charge_conjugate_decays_in_decfile_with_aliases(): @@ -436,7 +533,7 @@ def test_creation_charge_conjugate_decays_in_decfile_with_aliases(): The decay modes for the latter 5 should be created on the fly, hence providing in total 10 sets of particle decays parsed. """ - p = DecFileParser(DIR / '../data/test_Bd2DstDst.dec') + p = DecFileParser(DIR / "../data/test_Bd2DstDst.dec") p.parse() assert p.number_of_decays == 10 @@ -452,26 +549,28 @@ def test_creation_charge_conjugate_decays_in_decfile_without_CDecay_defs(): as there are no instructions on how to decay the anti-D0 and the D-! In short, there should only be 5 sets of decay modes parsed. """ - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() assert p.number_of_decays == 5 def test_master_DECAYdotDEC_file(): - p = DecFileParser(DIR / '../../decaylanguage/data/DECAY_LHCB.DEC') + p = DecFileParser(DIR / "../../decaylanguage/data/DECAY_LHCB.DEC") p.parse() assert p.number_of_decays == 506 + def test_BELLE2_decfile(): - p = DecFileParser(DIR / '../../decaylanguage/data/DECAY_BELLE2.DEC') + p = DecFileParser(DIR / "../../decaylanguage/data/DECAY_BELLE2.DEC") p.parse() # Just check the dec file will parse since I do not know # how many decays are in the dec file. 
assert p.number_of_decays == 356 + def test_lark_file_model_list_consistency(): """ Make sure that the list of known decay models in the grammar file @@ -479,12 +578,13 @@ def test_lark_file_model_list_consistency(): to the user via 'from decaylanguage.dec.enums import known_decay_models'. """ - filename = str(DIR / '../../decaylanguage/data/decfile.lark') + filename = str(DIR / "../../decaylanguage/data/decfile.lark") with open(filename) as lark_file: lines = lark_file.readlines() for line in lines: - if 'MODEL_NAME.2' in line: break - models = line.split(':')[1].strip(' ').strip('\n').split('"|"') + if "MODEL_NAME.2" in line: + break + models = line.split(":")[1].strip(" ").strip("\n").split('"|"') models = [m.strip('"') for m in models] assert models == list(known_decay_models) diff --git a/tests/dec/test_issues.py b/tests/dec/test_issues.py index d9bce1af..f6e97636 100644 --- a/tests/dec/test_issues.py +++ b/tests/dec/test_issues.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -19,5 +20,5 @@ def test_issue_90(): with pytest.raises(UnexpectedToken): - p = DecFileParser(DIR / '../data/test_issue90.dec') + p = DecFileParser(DIR / "../data/test_issue90.dec") p.parse() diff --git a/tests/decay/test_decay.py b/tests/decay/test_decay.py index 758e8c86..f1346ac3 100644 --- a/tests/decay/test_decay.py +++ b/tests/decay/test_decay.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -12,186 +13,243 @@ def test_DaughtersDict_constructor_from_dict(): - dd = DaughtersDict({'K+': 1, 'K-': 2, 'pi+': 1, 'pi0': 1}) - assert dd == {'K+': 1, 'K-': 2, 'pi+': 1, 'pi0': 1} + dd = DaughtersDict({"K+": 1, "K-": 2, "pi+": 1, "pi0": 1}) + assert dd == {"K+": 1, "K-": 2, "pi+": 1, "pi0": 1} def test_DaughtersDict_constructor_from_list(): - dd = DaughtersDict(['K+', 'K-', 'K-', 'pi+', 'pi0']) - assert dd == {'K+': 1, 'K-': 2, 'pi+': 1, 'pi0': 1} + dd = DaughtersDict(["K+", "K-", "K-", "pi+", "pi0"]) + assert dd == {"K+": 1, "K-": 2, "pi+": 1, "pi0": 1} def test_DaughtersDict_constructor_from_string(): - dd = DaughtersDict('K+ K- pi0') - assert dd == {'K+': 1, 'K-': 1, 'pi0': 1} + dd = DaughtersDict("K+ K- pi0") + assert dd == {"K+": 1, "K-": 1, "pi0": 1} def test_DaughtersDict_string_repr(): - dd = DaughtersDict(['K+', 'K-', 'K-', 'pi+', 'pi0']) + dd = DaughtersDict(["K+", "K-", "K-", "pi+", "pi0"]) assert dd.__str__() == "" def test_DaughtersDict_len(): - dd = DaughtersDict({'K+': 1, 'K-': 3, 'pi0': 1}) + dd = DaughtersDict({"K+": 1, "K-": 3, "pi0": 1}) assert len(dd) == 5 def test_DaughtersDict_add(): - dd1 = DaughtersDict({'K+': 1, 'K-': 2, 'pi0': 3}) - dd2 = DaughtersDict({'K+': 1, 'K-': 1}) + dd1 = DaughtersDict({"K+": 1, "K-": 2, "pi0": 3}) + dd2 = DaughtersDict({"K+": 1, "K-": 1}) dd3 = dd1 + dd2 assert len(dd3) == 8 def test_DaughtersDict_to_string(): - dd1 = DaughtersDict({'K+': 1, 'K-': 2, 'pi0': 3}) - assert dd1.to_string() == 'K+ K- K- pi0 pi0 pi0' + dd1 = DaughtersDict({"K+": 1, "K-": 2, "pi0": 3}) + assert dd1.to_string() == "K+ K- K- pi0 pi0 pi0" def test_DecayMode_constructor_default(): dm = DecayMode() assert dm.bf == 0 assert dm.daughters == DaughtersDict() - assert dm.metadata == dict(model='', model_params='') + assert dm.metadata == dict(model="", model_params="") def test_DecayMode_constructor_simplest(): - dm = 
DecayMode(0.1234, 'K+ K-') + dm = DecayMode(0.1234, "K+ K-") assert dm.bf == 0.1234 - assert dm.daughters == DaughtersDict('K+ K-') - assert dm.metadata == dict(model='', model_params='') + assert dm.daughters == DaughtersDict("K+ K-") + assert dm.metadata == dict(model="", model_params="") def test_DecayMode_constructor_simple(): - dd = DaughtersDict('K+ K-') + dd = DaughtersDict("K+ K-") dm = DecayMode(0.1234, dd) assert dm.bf == 0.1234 - assert dm.daughters == DaughtersDict('K+ K-') - assert dm.metadata == dict(model='', model_params='') + assert dm.daughters == DaughtersDict("K+ K-") + assert dm.metadata == dict(model="", model_params="") def test_DecayMode_constructor_from_pdgids(): - dm = DecayMode.from_pdgids(0.5, [321, -321], - model='TAUHADNU', - model_params=[-0.108, 0.775, 0.149, 1.364, 0.400]) - assert dm.daughters == DaughtersDict('K+ K-') + dm = DecayMode.from_pdgids( + 0.5, + [321, -321], + model="TAUHADNU", + model_params=[-0.108, 0.775, 0.149, 1.364, 0.400], + ) + assert dm.daughters == DaughtersDict("K+ K-") def test_DecayMode_constructor_with_model_info(): - dd = DaughtersDict('pi- pi0 nu_tau') - dm = DecayMode(0.2551, dd, - model='TAUHADNU', - model_params=[-0.108, 0.775, 0.149, 1.364, 0.400]) - assert dm.metadata == {'model': 'TAUHADNU', - 'model_params': [-0.108, 0.775, 0.149, 1.364, 0.4]} + dd = DaughtersDict("pi- pi0 nu_tau") + dm = DecayMode( + 0.2551, dd, model="TAUHADNU", model_params=[-0.108, 0.775, 0.149, 1.364, 0.400] + ) + assert dm.metadata == { + "model": "TAUHADNU", + "model_params": [-0.108, 0.775, 0.149, 1.364, 0.4], + } def test_DecayMode_constructor_with_user_model_info(): - dd = DaughtersDict('K+ K-') - dm = DecayMode(0.5, dd, model='PHSP', study='toy', year=2019) - assert dm.metadata == {'model': 'PHSP', - 'model_params': '', - 'study': 'toy', - 'year': 2019} + dd = DaughtersDict("K+ K-") + dm = DecayMode(0.5, dd, model="PHSP", study="toy", year=2019) + assert dm.metadata == { + "model": "PHSP", + "model_params": "", + "study": "toy", + "year": 2019, + } def test_DecayMode_constructor_from_dict(): - dm = DecayMode.from_dict({'bf': 0.98823, - 'fs': ['gamma', 'gamma'], - 'model': 'PHSP', - 'model_params': ''}) + dm = DecayMode.from_dict( + {"bf": 0.98823, "fs": ["gamma", "gamma"], "model": "PHSP", "model_params": ""} + ) assert str(dm) == "" def test_DecayMode_describe_simple(): - dd = DaughtersDict('pi- pi0 nu_tau') - dm = DecayMode(0.2551, dd, model='TAUHADNU', model_params=[-0.108, 0.775, 0.149, 1.364, 0.400]) - assert 'BF: 0.2551' in dm.describe() - assert 'Decay model: TAUHADNU [-0.108, 0.775, 0.149, 1.364, 0.4]' in dm.describe() + dd = DaughtersDict("pi- pi0 nu_tau") + dm = DecayMode( + 0.2551, dd, model="TAUHADNU", model_params=[-0.108, 0.775, 0.149, 1.364, 0.400] + ) + assert "BF: 0.2551" in dm.describe() + assert "Decay model: TAUHADNU [-0.108, 0.775, 0.149, 1.364, 0.4]" in dm.describe() def test_DecayMode_describe_with_extra_info(): - dd = DaughtersDict('K+ K-') - dm = DecayMode(1.e-6, dd, model='PHSP', study='toy', year=2019) - assert 'Extra info:' in dm.describe() - assert 'study: toy' in dm.describe() - assert 'year: 2019' in dm.describe() + dd = DaughtersDict("K+ K-") + dm = DecayMode(1.0e-6, dd, model="PHSP", study="toy", year=2019) + assert "Extra info:" in dm.describe() + assert "study: toy" in dm.describe() + assert "year: 2019" in dm.describe() def test_DecayMode_charge_conjugate(): - dd = DaughtersDict('pi- pi0 nu_tau') - dm = DecayMode(0.2551, dd, model='TAUHADNU', model_params=[-0.108, 0.775, 0.149, 1.364, 0.400]) + dd = 
DaughtersDict("pi- pi0 nu_tau") + dm = DecayMode( + 0.2551, dd, model="TAUHADNU", model_params=[-0.108, 0.775, 0.149, 1.364, 0.400] + ) dm_cc = dm.charge_conjugate() - assert dm_cc.daughters == DaughtersDict('pi+ pi0 anti-nu_tau') - assert 'BF: 0.2551' in dm.describe() - assert 'Decay model: TAUHADNU [-0.108, 0.775, 0.149, 1.364, 0.4]' in dm.describe() + assert dm_cc.daughters == DaughtersDict("pi+ pi0 anti-nu_tau") + assert "BF: 0.2551" in dm.describe() + assert "Decay model: TAUHADNU [-0.108, 0.775, 0.149, 1.364, 0.4]" in dm.describe() - dd = DaughtersDict('pi- pi0 nu(tau)') - dd.charge_conjugate(pdg_name=True) == DaughtersDict('pi+ pi0 nu(tau)~') + dd = DaughtersDict("pi- pi0 nu(tau)") + dd.charge_conjugate(pdg_name=True) == DaughtersDict("pi+ pi0 nu(tau)~") def test_DecayMode_string_repr(): - dd = DaughtersDict('p p~ K+ pi-') - dm = DecayMode(1.e-6, dd, model='PHSP') + dd = DaughtersDict("p p~ K+ pi-") + dm = DecayMode(1.0e-6, dd, model="PHSP") assert str(dm) == "" def test_DecayMode_number_of_final_states(): - dd = DaughtersDict('p p~ K+ pi-') - dm = DecayMode(1.e-6, dd, model='PHSP') + dd = DaughtersDict("p p~ K+ pi-") + dm = DecayMode(1.0e-6, dd, model="PHSP") assert len(dm) == 4 -dm1 = DecayMode(0.0124, 'K_S0 pi0', model='PHSP') -dm2 = DecayMode(0.692, 'pi+ pi-') -dm3 = DecayMode(0.98823, 'gamma gamma') -dc = DecayChain('D0', {'D0':dm1, 'K_S0':dm2, 'pi0':dm3}) +dm1 = DecayMode(0.0124, "K_S0 pi0", model="PHSP") +dm2 = DecayMode(0.692, "pi+ pi-") +dm3 = DecayMode(0.98823, "gamma gamma") +dc = DecayChain("D0", {"D0": dm1, "K_S0": dm2, "pi0": dm3}) -dm1 = DecayMode(0.6770, 'D0 pi+') -dm2 = DecayMode(0.0124, 'K_S0 pi0') -dm3 = DecayMode(0.692, 'pi+ pi-') -dm4 = DecayMode(0.98823, 'gamma gamma') -dc2 = DecayChain('D*+', {'D*+':dm1, 'D0':dm2, 'K_S0':dm3, 'pi0':dm4}) +dm1 = DecayMode(0.6770, "D0 pi+") +dm2 = DecayMode(0.0124, "K_S0 pi0") +dm3 = DecayMode(0.692, "pi+ pi-") +dm4 = DecayMode(0.98823, "gamma gamma") +dc2 = DecayChain("D*+", {"D*+": dm1, "D0": dm2, "K_S0": dm3, "pi0": dm4}) def test_DecayChain_constructor_subdecays(): assert len(dc.decays) == 3 - assert dc.mother == 'D0' + assert dc.mother == "D0" def test_DecayChain_constructor_from_dict(): - dc_dict = {'D0': [{'bf': 0.0124, - 'fs': [{'K_S0': [{'bf': 0.692, - 'fs': ['pi+', 'pi-'], - 'model': '', - 'model_params': ''}]}, - {'pi0': [{'bf': 0.98823, - 'fs': ['gamma', 'gamma'], - 'model': '', - 'model_params': ''}]}], - 'model': 'PHSP', - 'model_params': ''}] - } + dc_dict = { + "D0": [ + { + "bf": 0.0124, + "fs": [ + { + "K_S0": [ + { + "bf": 0.692, + "fs": ["pi+", "pi-"], + "model": "", + "model_params": "", + } + ] + }, + { + "pi0": [ + { + "bf": 0.98823, + "fs": ["gamma", "gamma"], + "model": "", + "model_params": "", + } + ] + }, + ], + "model": "PHSP", + "model_params": "", + } + ] + } assert DecayChain.from_dict(dc_dict).to_dict() == dc_dict def test_DecayChain_to_dict(): - assert dc2.to_dict() == {'D*+': [{'bf': 0.677, - 'fs': [{'D0': [{'bf': 0.0124, - 'fs': [{'K_S0': [{'bf': 0.692, - 'fs': ['pi+', 'pi-'], - 'model': '', - 'model_params': ''}]}, - {'pi0': [{'bf': 0.98823, - 'fs': ['gamma', 'gamma'], - 'model': '', - 'model_params': ''}]}], - 'model': '', - 'model_params': ''}]}, - 'pi+'], - 'model': '', - 'model_params': ''}]} + assert dc2.to_dict() == { + "D*+": [ + { + "bf": 0.677, + "fs": [ + { + "D0": [ + { + "bf": 0.0124, + "fs": [ + { + "K_S0": [ + { + "bf": 0.692, + "fs": ["pi+", "pi-"], + "model": "", + "model_params": "", + } + ] + }, + { + "pi0": [ + { + "bf": 0.98823, + "fs": ["gamma", "gamma"], + "model": "", + 
"model_params": "", + } + ] + }, + ], + "model": "", + "model_params": "", + } + ] + }, + "pi+", + ], + "model": "", + "model_params": "", + } + ] + } def test_DecayChain_properties(): diff --git a/tests/decay/test_viewer.py b/tests/decay/test_viewer.py index e86a76f7..0aff896e 100644 --- a/tests/decay/test_viewer.py +++ b/tests/decay/test_viewer.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -21,10 +22,10 @@ def test_single_decay(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - chain = p.build_decay_chains('D*+', stable_particles=['D+', 'D0', 'pi0']) + chain = p.build_decay_chains("D*+", stable_particles=["D+", "D0", "pi0"]) dcv = DecayChainViewer(chain) graph_output_as_dot = dcv.to_string() @@ -34,10 +35,10 @@ def test_single_decay(): def test_simple_decay_chain(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - chain = p.build_decay_chains('D*+') + chain = p.build_decay_chains("D*+") dcv = DecayChainViewer(chain) graph_output_as_dot = dcv.to_string() @@ -49,13 +50,13 @@ def test_simple_decay_chain(): checklist_decfiles = ( - (DIR / '../data/test_Bc2BsPi_Bs2KK.dec', 'B_c+sig'), - (DIR / '../data/test_Bd2DDst_Ds2DmPi0.dec', 'B0sig'), - (DIR / '../data/test_Bd2DmTauNu_Dm23PiPi0_Tau2MuNu.dec', 'B0sig'), - (DIR / '../data/test_Bd2DMuNu.dec', 'anti-B0sig'), - (DIR / '../data/test_Bd2DstDst.dec', 'anti-B0sig'), - (DIR / '../data/test_example_Dst.dec', 'D*+'), - (DIR / '../data/test_Xicc2XicPiPi.dec', 'Xi_cc+sig') + (DIR / "../data/test_Bc2BsPi_Bs2KK.dec", "B_c+sig"), + (DIR / "../data/test_Bd2DDst_Ds2DmPi0.dec", "B0sig"), + (DIR / "../data/test_Bd2DmTauNu_Dm23PiPi0_Tau2MuNu.dec", "B0sig"), + (DIR / "../data/test_Bd2DMuNu.dec", "anti-B0sig"), + (DIR / "../data/test_Bd2DstDst.dec", "anti-B0sig"), + (DIR / "../data/test_example_Dst.dec", "D*+"), + (DIR / "../data/test_Xicc2XicPiPi.dec", "Xi_cc+sig"), ) @@ -66,49 +67,52 @@ def test_duplicate_arrows(decfilepath, signal_mother): gets more than one arrow to it, which would show a bug in the creation of the DOT file recursively parsing the built decay chain. 
""" - p = DecFileParser(decfilepath, DIR / '../../decaylanguage/data/DECAY_LHCB.DEC') + p = DecFileParser(decfilepath, DIR / "../../decaylanguage/data/DECAY_LHCB.DEC") p.parse() chain = p.build_decay_chains(signal_mother) dcv = DecayChainViewer(chain) graph_output_as_dot = dcv.to_string() - l = [i.split(' ')[0] for i in graph_output_as_dot.split('-> dec')[1:]] # list of node identifiers + l = [ + i.split(" ")[0] for i in graph_output_as_dot.split("-> dec")[1:] + ] # list of node identifiers assert len(set(l)) == len(l) def test_init_non_defaults(): - p = DecFileParser(DIR / '../data/test_example_Dst.dec') + p = DecFileParser(DIR / "../data/test_example_Dst.dec") p.parse() - chain = p.build_decay_chains('D*+') - dcv = DecayChainViewer(chain, graph_name='TEST', rankdir='TB') + chain = p.build_decay_chains("D*+") + dcv = DecayChainViewer(chain, graph_name="TEST", rankdir="TB") - assert dcv.graph.get_name() == 'TEST' - assert dcv.graph.get_rankdir() == 'TB' + assert dcv.graph.get_name() == "TEST" + assert dcv.graph.get_rankdir() == "TB" def test_graphs_with_EvtGen_specific_names(): - p = DecFileParser(DIR / '../../decaylanguage/data/DECAY_LHCB.DEC') + p = DecFileParser(DIR / "../../decaylanguage/data/DECAY_LHCB.DEC") p.parse() # Not setting many of the particles as stable would result in a gargantuesque chain, # which would also take a fair amount of time to build! - list_stable_particles = ['Xi_c0', - 'Xi-', - 'D0', - 'Omega_c0', - 'Sigma_c0', - 'tau-', - 'D_s-', - 'J/psi', - 'pi0', - 'Lambda0', - 'psi(2S)' - ] - - chain = p.build_decay_chains('Xi_b-', stable_particles=list_stable_particles) + list_stable_particles = [ + "Xi_c0", + "Xi-", + "D0", + "Omega_c0", + "Sigma_c0", + "tau-", + "D_s-", + "J/psi", + "pi0", + "Lambda0", + "psi(2S)", + ] + + chain = p.build_decay_chains("Xi_b-", stable_particles=list_stable_particles) dcv = DecayChainViewer(chain) - assert '(cs)0' in dcv.to_string() # not 'cs_0' ;-) - assert 'Ξc0' in dcv.to_string() + assert "(cs)0" in dcv.to_string() # not 'cs_0' ;-) + assert "Ξc0" in dcv.to_string() diff --git a/tests/output/DtoKpipipi_v2.cu b/tests/output/DtoKpipipi_v2.cu index 55dc64c6..e71a658a 100644 --- a/tests/output/DtoKpipipi_v2.cu +++ b/tests/output/DtoKpipipi_v2.cu @@ -48,11 +48,11 @@ Dtos1P1_s1toS2P2_S2toP3P4 : SF_4Body.DtoPP1_PtoSP2_StoP3P4 D0{K(1460)~-[GSpline.EFF]{PiPi30[kMatrix.prod.1]{pi+,pi-},K-},pi+} - Scalar: PiPi00 PiPi10 PiPi20 PiPi30 KPi00 KPi10 KPi20 -PseudoScalar: pi+ pi- K- D0 K(1460)~- - Vector: rho(770)0 omega(782)0 K(892)~0 rho(1450)0 + Scalar: PiPi00 PiPi10 PiPi20 PiPi30 KPi00 KPi10 KPi20 +PseudoScalar: pi+ pi- K- D0 K(1460)~- + Vector: rho(770)0 omega(782)0 K(892)~0 rho(1450)0 Axial: K(1)(1270)~- a(1)(1260)+ K(1)(1400)~- - Tensor: K(2)~- + Tensor: K(2)~- PseudoTensor: Unknown: @@ -1535,5 +1535,3 @@ ONE 2}); DK3P_DI.amplitudes_B.push_back(amplitudes_list.back()); - - diff --git a/tests/test_convert.py b/tests/test_convert.py index f1dd6534..defd9d57 100644 --- a/tests/test_convert.py +++ b/tests/test_convert.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. 
# # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -18,7 +19,8 @@ @pytest.mark.skip def test_full_convert(): - text = ampgen2goofit(DIR / '../models/DtoKpipipi_v2.txt', ret_output=True) - with (DIR / 'output/DtoKpipipi_v2.cu').open() as f: - assert (set(x.strip() for x in text.splitlines() if 'Generated on' not in x) - == set(x.strip() for x in f.readlines() if 'Generated on' not in x)) + text = ampgen2goofit(DIR / "../models/DtoKpipipi_v2.txt", ret_output=True) + with (DIR / "output/DtoKpipipi_v2.cu").open() as f: + assert set( + x.strip() for x in text.splitlines() if "Generated on" not in x + ) == set(x.strip() for x in f.readlines() if "Generated on" not in x) diff --git a/tests/test_dec_full.py b/tests/test_dec_full.py index 143d5cf3..2be67952 100644 --- a/tests/test_dec_full.py +++ b/tests/test_dec_full.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -11,6 +12,7 @@ import pytest + class TreeToDec2(Transformer): missing = set() keyerrs = set() @@ -19,7 +21,7 @@ def __init__(self, alias_dict): self.alias_dict = alias_dict def particle(self, items): - label, = items + (label,) = items if label in self.alias_dict: label = self.alias_dict[label] try: @@ -31,15 +33,16 @@ def particle(self, items): self.keyerrs.add(str(label)) return str(label) + @pytest.mark.skip def test_dec_full(): - with data.open_text(data, 'DECAY_LHCB.DEC') as f: + with data.open_text(data, "DECAY_LHCB.DEC") as f: txt = f.read() - with data.open_text(data, 'decfile.lark') as f: + with data.open_text(data, "decfile.lark") as f: grammar = f.read() - l = Lark(grammar, parser='lalr', lexer='standard') # , transformer = TreeToDec()) + l = Lark(grammar, parser="lalr", lexer="standard") # , transformer = TreeToDec()) parsed = l.parse(txt) assert bool(parsed) @@ -58,7 +61,6 @@ def test_dec_full(): for item in pythia_def: print(item[0], ":", item[1], "=", item[2]) - labelled = TreeToDec2(alias).transform(decay) print(TreeToDec2.missing) diff --git a/tests/test_decaylanguage.py b/tests/test_decaylanguage.py index fc14f533..a692878e 100644 --- a/tests/test_decaylanguage.py +++ b/tests/test_decaylanguage.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE diff --git a/tests/test_goofit.py b/tests/test_goofit.py index 2699b48b..5c3fabd5 100644 --- a/tests/test_goofit.py +++ b/tests/test_goofit.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (c) 2018-2020, Eduardo Rodrigues and Henry Schreiner. # # Distributed under the 3-clause BSD license, see accompanying file LICENSE @@ -9,14 +10,16 @@ def test_simple(): - lines, all_states = GooFitChain.read_ampgen(text=''' + lines, all_states = GooFitChain.read_ampgen( + text=""" # This is a test (should not affect output) EventType D0 K- pi+ pi+ pi- D0[D]{K*(892)bar0{K-,pi+},rho(770)0{pi+,pi-}} 2 1 0 2 0 0 - ''') + """ + ) assert Particle.from_pdgid(421) == all_states[0] # D0 assert Particle.from_pdgid(-321) == all_states[1] # K- @@ -25,4 +28,4 @@ def test_simple(): assert Particle.from_pdgid(-211) == all_states[4] # pi- assert len(lines) == 1 - line, = lines + (line,) = lines
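For reference, a minimal sketch of the decay-file workflow the reformatted tests above exercise. It assumes the class locations follow the files touched in this patch (decaylanguage/dec/dec.py, decaylanguage/decay/decay.py, decaylanguage/decay/viewer.py) and that the example file tests/data/test_example_Dst.dec from the repository checkout is on disk; paths and values are illustrative only, taken from the assertions in the tests rather than from any new API.

# Sketch of the API exercised by the tests in this patch (assumptions noted above).
from decaylanguage.dec.dec import DecFileParser
from decaylanguage.decay.decay import DaughtersDict, DecayMode
from decaylanguage.decay.viewer import DecayChainViewer

# Parse an EvtGen-style .dec file and count the Decay blocks it defines.
p = DecFileParser("tests/data/test_example_Dst.dec")
p.parse()
print(p.number_of_decays)

# Build the decay chain of a mother particle, keeping selected daughters
# stable, and render it as a DOT graph string via the viewer.
chain = p.build_decay_chains("D*+", stable_particles=["D+", "D0", "pi0"])
dcv = DecayChainViewer(chain)
dot_source = dcv.to_string()

# Universal decay-mode representation: branching fraction, daughters, model.
dm = DecayMode(0.692, "pi+ pi-", model="PHSP")
assert dm.daughters == DaughtersDict("pi+ pi-")
# The pi+ pi- final state is its own charge conjugate, so the conjugate
# mode has the same daughters.
assert dm.charge_conjugate().daughters == DaughtersDict("pi+ pi-")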