
feat!: support multiple backends #51

Draft
wants to merge 24 commits into base: main
Changes from all commits (24 commits)
99a156d
refactor: dynamically import tnp in backend module
redeboer May 27, 2021
8b9c454
refactor: create numpy interface for tf.random
redeboer May 27, 2021
abc86a2
refactor: generalize random module
redeboer May 27, 2021
8a3030f
refactor: move tf.shape to backend module
redeboer May 27, 2021
609754a
refactor: move tf.assert's to backend module
redeboer May 27, 2021
94ebe80
refactor: move Tensor/Variable to backend module
redeboer May 27, 2021
170c620
refactor: remove last tensorflow imports
redeboer May 27, 2021
e450c6c
test: add skip decorator for tests that require TF
redeboer May 27, 2021
3ff2a1c
test: use phasespace.backend in tests
redeboer May 27, 2021
2aa6f93
test: use uniform instead of uniform_full_int
redeboer May 27, 2021
fe2501a
feat: provide mapping for numpy as backend
redeboer May 27, 2021
d1bb7dd
refactor!: set numpy as default backend
redeboer May 27, 2021
3bd1f85
build!: move tensorflow requirements to extras
redeboer May 27, 2021
e98d7cd
ci: run pytest with NumPy as backend
redeboer May 27, 2021
290852c
fix: remove duplicate Tensor/Variable import
redeboer May 27, 2021
0dbfdfc
Merge branch 'master' into numpy-backend
redeboer May 28, 2021
1c80cc8
Merge branch 'master' into numpy-backend
redeboer Sep 21, 2021
8413194
revert: move tensorflow requirements to extras
redeboer Sep 21, 2021
669aaf7
Merge branch 'master' into numpy-backend
jonas-eschle Mar 31, 2022
dc9b865
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 31, 2022
1e23047
Merge branch 'master' into numpy-backend
jonas-eschle Aug 25, 2022
aeea1a7
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Aug 25, 2022
7707f58
Merge branch 'master' into numpy-backend
jonas-eschle Feb 8, 2024
1c321a3
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 8, 2024
8 changes: 7 additions & 1 deletion .github/workflows/ci.yml
@@ -67,7 +67,13 @@ jobs:
cd ..
- name: Test with pytest (eager mode)
run: |
PHASESPACE_EAGER=1 pytest --basetemp={envtmpdir} tests
PHASESPACE_EAGER=1 pytest --basetemp={envtmpdir}
cd docs
PHASESPACE_EAGER=1 pytest --dist loadscope --nbval-lax --ignore=.ipynb_checkpoints -n${{ steps.cpu-cores.outputs.count }}
cd ..
- name: Test with pytest (NumPy backend)
run: |
PHASESPACE_BACKEND=NUMPY pytest --basetemp={envtmpdir}
cd docs
PHASESPACE_BACKEND=NUMPY pytest --nbval-lax --ignore=.ipynb_checkpoints
cd ..
1 change: 1 addition & 0 deletions data/.gitignore
@@ -0,0 +1 @@
/B2K*.root*
22 changes: 8 additions & 14 deletions phasespace/__init__.py
@@ -11,21 +11,15 @@
__email__ = "[email protected]"
__maintainer__ = "zfit"

__credits__ = ["Jonas Eschle <[email protected]>"]
__credits__ = [
"Jonas Eschle <[email protected]>",
]

__all__ = ["nbody_decay", "GenParticle", "random"]

import tensorflow as tf
__all__ = [
"GenParticle",
"nbody_decay",
"random",
]

from . import random
from .phasespace import GenParticle, nbody_decay


def _set_eager_mode():
import os

is_eager = bool(os.environ.get("PHASESPACE_EAGER"))
tf.config.run_functions_eagerly(is_eager)


_set_eager_mode()
21 changes: 0 additions & 21 deletions phasespace/backend.py

This file was deleted.

82 changes: 82 additions & 0 deletions phasespace/backend/__init__.py
@@ -0,0 +1,82 @@
# flake8: noqa
import os
from enum import Enum, auto

__all__ = [
"function",
"function_jit",
"function_jit_fixedshape",
"get_shape",
"random",
"tnp",
]


class BackendType(Enum):
TENSORFLOW = auto()
NUMPY = auto()

@staticmethod
def get_backend(backend: str) -> "BackendType":
backend_formatted = backend.lower().strip()
if backend_formatted in {"", "np", "numpy"}:
return BackendType.NUMPY
if backend_formatted in {"tf", "tensorflow"}:
return BackendType.TENSORFLOW
raise NotImplementedError(f'No backend implemented for "{backend}"')


BACKEND = BackendType.get_backend(os.environ.get("PHASESPACE_BACKEND", ""))
if BACKEND == BackendType.TENSORFLOW:
import tensorflow as tf
import tensorflow.experimental.numpy as tnp

from . import _tf_random as random

if int(tf.__version__.split(".")[1]) < 5: # smaller than 2.5
jit_compile_argname = "experimental_compile"
else:
jit_compile_argname = "jit_compile"
function = tf.function(
autograph=False,
experimental_relax_shapes=True,
**{jit_compile_argname: False},
)
function_jit = tf.function(
autograph=False,
experimental_relax_shapes=True,
**{jit_compile_argname: True},
)
function_jit_fixedshape = tf.function(
autograph=False,
experimental_relax_shapes=False,
**{jit_compile_argname: True},
)

Tensor = tf.Tensor
Variable = tf.Variable
get_shape = tf.shape # get shape dynamically
assert_equal = tf.assert_equal
assert_greater_equal = tf.debugging.assert_greater_equal

is_eager = bool(os.environ.get("PHASESPACE_EAGER"))
tf.config.run_functions_eagerly(is_eager)

if BACKEND == BackendType.NUMPY:
import numpy as tnp

from . import _np_random as random

function = lambda x: x
function_jit = lambda x: x
function_jit_fixedshape = lambda x: x

Tensor = tnp.ndarray
Variable = tnp.ndarray
get_shape = tnp.shape

def assert_equal(x, y, message: str, name: str = "") -> None:
return tnp.testing.assert_equal(x, y, err_msg=message)

def assert_greater_equal(x, y, message: str, name: str = "") -> None:
return tnp.testing.assert_array_less(-x, -y, err_msg=message)
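A note on usage: the backend is resolved once, at import time, from the PHASESPACE_BACKEND environment variable, so the variable must be set before phasespace.backend is first imported. A minimal sketch of selecting the NumPy backend in a user script (the assert and print lines are only illustrative):

import os

# Must be set before the first import of phasespace / phasespace.backend.
# "", "np" and "numpy" select NumPy; "tf" and "tensorflow" select TensorFlow.
os.environ["PHASESPACE_BACKEND"] = "numpy"

from phasespace import backend

assert backend.BACKEND == backend.BackendType.NUMPY
print(backend.tnp.__name__)  # prints "numpy" with the NumPy backend active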
22 changes: 22 additions & 0 deletions phasespace/backend/_np_random.py
@@ -0,0 +1,22 @@
from __future__ import annotations

__all__ = [
"Generator",
"from_seed",
"default_rng",
]

from typing import Optional

from numpy.random import PCG64, BitGenerator, Generator, default_rng


def from_seed(
seed,
alg: type[BitGenerator] | None = None,
) -> Generator:
"""Function that mimicks `tf.random.Generator.from_seed`."""
if alg is None:
alg = PCG64
bit_generator = alg(seed)
return Generator(bit_generator)
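As a quick illustration of the shim above: with the NumPy backend active, phasespace.backend.random points at this module, and from_seed returns an ordinary numpy.random.Generator, so the plain NumPy sampling API applies (the seed and sample shape below are arbitrary):

from phasespace.backend import random  # resolves to _np_random under the NumPy backend

rng = random.from_seed(15)          # numpy.random.Generator backed by PCG64
sample = rng.uniform(low=0.0, high=1.0, size=(3,))
fresh = random.default_rng()        # unseeded generator, exactly as in NumPy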
10 changes: 10 additions & 0 deletions phasespace/backend/_tf_random.py
@@ -0,0 +1,10 @@
__all__ = [
"Generator",
"from_seed",
"default_rng",
]

from tensorflow.random import Generator
from tensorflow.random import get_global_generator as default_rng

from_seed = Generator.from_seed
4 changes: 1 addition & 3 deletions phasespace/kinematics.py
@@ -6,9 +6,7 @@
# =============================================================================
"""Basic kinematics."""

import tensorflow.experimental.numpy as tnp

from phasespace.backend import function, function_jit
from phasespace.backend import function, function_jit, tnp


@function_jit
58 changes: 30 additions & 28 deletions phasespace/phasespace.py
@@ -17,14 +17,18 @@
from collections.abc import Callable
from math import pi

import tensorflow as tf
import tensorflow.experimental.numpy as tnp

from . import kinematics as kin
from .backend import function, function_jit_fixedshape
from .random import SeedLike, get_rng

RELAX_SHAPES = False
from .backend import (
Tensor,
Variable,
assert_equal,
assert_greater_equal,
function,
function_jit_fixedshape,
get_shape,
tnp,
)
from .random import SeedLike, generate_uniform, get_rng


def process_list_to_tensor(lst):
@@ -94,12 +98,10 @@ def __init__(self, name: str, mass: Callable | int | float) -> None: # noqa
self.name = name
self.children = []
self._mass_val = mass
if not callable(mass) and not isinstance(mass, tf.Variable):
if not callable(mass) and not isinstance(mass, Variable):
mass = tnp.asarray(mass, dtype=tnp.float64)
else:
mass = tf.function(
mass, autograph=False, experimental_relax_shapes=RELAX_SHAPES
)
mass = function(mass)
self._mass = mass
self._generate_called = False # not yet called, children can be set

@@ -129,11 +131,11 @@ def get_list_of_names(part):
@function
def get_mass(
self,
min_mass: tf.Tensor = None,
max_mass: tf.Tensor = None,
n_events: tf.Tensor | tf.Variable = None,
min_mass: Tensor = None,
max_mass: Tensor = None,
n_events: Union[Tensor, Variable] = None,
seed: SeedLike = None,
) -> tf.Tensor:
) -> Tensor:
"""Get the particle mass.

If the particle is resonant, the mass function will be called with the
@@ -246,11 +248,11 @@ def _preprocess(momentum, n_events):
if n_events is not None:
momentum_shape = momentum.shape[0]
if momentum_shape is None:
momentum_shape = tf.shape(momentum)[0]
momentum_shape = get_shape(momentum)[0]
momentum_shape = tnp.asarray(momentum_shape, tnp.int64)
else:
momentum_shape = tnp.asarray(momentum_shape, dtype=tnp.int64)
tf.assert_equal(
assert_equal(
n_events,
momentum_shape,
message="Conflicting inputs -> momentum_shape and n_events",
@@ -260,7 +262,7 @@ def _preprocess(momentum, n_events):
if len(momentum.shape) == 2:
n_events = momentum.shape[0]
if n_events is None: # dynamic shape
n_events = tf.shape(momentum)[0]
n_events = get_shape(momentum)[0]
n_events = tnp.asarray(n_events, dtype=tnp.int64)
else:
n_events = tnp.asarray(1, dtype=tnp.int64)
@@ -339,7 +341,7 @@ def recurse_stable(part):
# if len(masses.shape) == 1:
# masses = tnp.expand_dims(masses, axis=0)
available_mass = top_mass - tnp.sum(masses, axis=1, keepdims=True)
tf.debugging.assert_greater_equal(
assert_greater_equal(
available_mass,
tnp.zeros_like(available_mass, dtype=tnp.float64),
message="Forbidden decay",
@@ -348,7 +350,7 @@ def recurse_stable(part):
w_max = self._get_w_max(available_mass, masses)
p_top_boost = kin.boost_components(p_top)
# Start the generation
random_numbers = rng.uniform((n_events, n_particles - 2), dtype=tnp.float64)
random_numbers = generate_uniform(rng, shape=(n_events, n_particles - 2))
random = tnp.concatenate(
[
tnp.zeros((n_events, 1), dtype=tnp.float64),
@@ -425,14 +427,14 @@ def _generate_part2(inv_masses, masses, n_events, n_particles, rng):
)
)

cos_z = tnp.asarray(2.0, dtype=tnp.float64) * rng.uniform(
(n_events, 1), dtype=tnp.float64
cos_z = tnp.asarray(2.0, dtype=tnp.float64) * generate_uniform(
rng, shape=(n_events, 1)
) - tnp.asarray(1.0, dtype=tnp.float64)
sin_z = tnp.sqrt(tnp.asarray(1.0, dtype=tnp.float64) - cos_z * cos_z)
ang_y = (
tnp.asarray(2.0, dtype=tnp.float64)
* tnp.asarray(pi, dtype=tnp.float64)
* rng.uniform((n_events, 1), dtype=tnp.float64)
* generate_uniform(rng, shape=(n_events, 1))
)
cos_y = tnp.cos(ang_y)
sin_y = tnp.sin(ang_y)
@@ -610,11 +612,11 @@ def recurse_w_max(parent_mass, current_mass_tree):

def generate(
self,
n_events: int | tf.Tensor | tf.Variable,
boost_to: tf.Tensor | None = None,
n_events: Union[int, Tensor, Variable],
boost_to: Optional[Tensor] = None,
normalize_weights: bool = True,
seed: SeedLike = None,
) -> tuple[tf.Tensor, dict[str, tf.Tensor]]:
) -> Tuple[Tensor, Dict[str, Tensor]]:
"""Generate normalized n-body phase space as tensorflow tensors.

Any TensorFlow tensor can always be converted to a numpy array with the method `numpy()`.
@@ -657,8 +659,8 @@ def generate(
f"The number of events requested ({n_events}) doesn't match the boost_to input size "
f"of {boost_to.shape}"
)
tf.assert_equal(tf.shape(boost_to)[0], tf.shape(n_events), message=message)
if not isinstance(n_events, tf.Variable):
assert_equal(len(boost_to), n_events, message=message)
if not isinstance(n_events, Variable):
n_events = tnp.asarray(n_events, dtype=tnp.int64)
weights, weights_max, parts, _ = self._recursive_generate(
n_events=n_events,
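With the backend indirection in place the public API is unchanged, so the same generation code runs on either backend. A minimal sketch using the mass constants from the test helpers (the pion mass value, roughly 139.57 MeV, is an assumption here since it is not shown in this diff):

import phasespace

B0_MASS = 5279.58      # MeV, as in tests/helpers/decays.py
PION_MASS = 139.57018  # MeV, assumed value

decay = phasespace.nbody_decay(B0_MASS, [PION_MASS] * 3)
weights, particles = decay.generate(n_events=1000)  # particles keyed as "p_0", "p_1", ...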
23 changes: 17 additions & 6 deletions phasespace/random.py
@@ -5,14 +5,16 @@
It mimics the TensorFlow API for random generators and (currently) relies on the global TF state,
especially the global random number generator, which is used to obtain new generators.
"""
from __future__ import annotations

from typing import Optional, Union

import tensorflow as tf
from phasespace.backend import random, tnp

SeedLike = Optional[Union[int, tf.random.Generator]]
SeedLike = Optional[Union[int, random.Generator]]


def get_rng(seed: SeedLike = None) -> tf.random.Generator:
def get_rng(seed: SeedLike = None) -> random.Generator:
"""Get or create a random number generators of type `tf.random.Generator`.

This can be used to either retrieve random number generators deterministically from them
@@ -33,9 +35,18 @@ def get_rng(seed: SeedLike = None) -> tf.random.Generator:
A `random.Generator`
"""
if seed is None:
rng = tf.random.get_global_generator()
elif not isinstance(seed, tf.random.Generator): # it's a seed, not an rng
rng = tf.random.Generator.from_seed(seed=seed)
rng = random.default_rng()
elif not isinstance(seed, random.Generator): # it's a seed, not an rng
rng = random.from_seed(seed)
else:
rng = seed
return rng


def generate_uniform(
rng: random.Generator, shape: tuple[int, ...], minval=0, maxval=1, dtype=tnp.float64
) -> tnp.ndarray:
try:
return rng.uniform(shape, minval=minval, maxval=maxval, dtype=dtype)
except TypeError:
return rng.uniform(low=minval, high=maxval, size=shape).astype(dtype)
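generate_uniform exists to bridge the two uniform() signatures: tf.random.Generator.uniform(shape, minval=..., maxval=..., dtype=...) versus numpy.random.Generator.uniform(low=..., high=..., size=...). A small backend-agnostic usage sketch (the seed is arbitrary):

from phasespace.random import generate_uniform, get_rng

rng = get_rng(seed=15)  # backend-specific generator (TF or NumPy)
values = generate_uniform(rng, shape=(100,), minval=0.0, maxval=1.0)
# values has shape (100,) on either backend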
4 changes: 4 additions & 0 deletions setup.cfg
@@ -60,6 +60,7 @@ test =
numpy
pytest
pytest-cov
pytest-env
pytest-xdist
scipy
uproot>=4.0,<5.0
@@ -102,10 +103,13 @@ include =
phasespace/*

[tool:pytest]
env =
D:PHASESPACE_BACKEND=TF
addopts =
--color=yes
--ignore=setup.py
filterwarnings =
ignore:.*the imp module is deprecated in favour of importlib.*:DeprecationWarning
ignore:.*invalid value encountered in true_divide.*:RuntimeWarning
norecursedirs =
tests/helpers
8 changes: 8 additions & 0 deletions tests/helpers/__init__.py
@@ -0,0 +1,8 @@
import pytest

from phasespace.backend import BACKEND, BackendType

tf_only = pytest.mark.skipif(
BACKEND != BackendType.TENSORFLOW,
reason="Test requires tensorflow",
)
27 changes: 15 additions & 12 deletions tests/helpers/decays.py
Original file line number Diff line number Diff line change
@@ -6,10 +6,13 @@
# =============================================================================
"""Some physics models to test with."""

import tensorflow as tf
import tensorflow_probability as tfp
try:
import tensorflow_probability as tfp
except ImportError:
pass

from phasespace import GenParticle
from phasespace.backend import tnp

# Use RapidSim values (https://github.com/gcowan/RapidSim/blob/master/config/particles.dat)
B0_MASS = 5279.58
@@ -25,12 +28,12 @@ def b0_to_kstar_gamma(kstar_width=KSTARZ_WIDTH):
"""Generate B0 -> K*gamma."""

def kstar_mass(min_mass, max_mass, n_events):
min_mass = tf.cast(min_mass, tf.float64)
max_mass = tf.cast(max_mass, tf.float64)
kstar_width_cast = tf.cast(kstar_width, tf.float64)
kstar_mass_cast = tf.cast(KSTARZ_MASS, dtype=tf.float64)
min_mass = tnp.asarray(min_mass, tnp.float64)
max_mass = tnp.asarray(max_mass, tnp.float64)
kstar_width_cast = tnp.asarray(kstar_width, tnp.float64)
kstar_mass_cast = tnp.asarray(KSTARZ_MASS, dtype=tnp.float64)

kstar_mass = tf.broadcast_to(kstar_mass_cast, shape=(n_events,))
kstar_mass = tnp.broadcast_to(kstar_mass_cast, shape=(n_events,))
if kstar_width > 0:
kstar_mass = tfp.distributions.TruncatedNormal(
loc=kstar_mass, scale=kstar_width_cast, low=min_mass, high=max_mass
@@ -49,11 +52,11 @@ def bp_to_k1_kstar_pi_gamma(k1_width=K1_WIDTH, kstar_width=KSTARZ_WIDTH):
"""Generate B+ -> K1 (-> K* (->K pi) pi) gamma."""

def res_mass(mass, width, min_mass, max_mass, n_events):
mass = tf.cast(mass, tf.float64)
width = tf.cast(width, tf.float64)
min_mass = tf.cast(min_mass, tf.float64)
max_mass = tf.cast(max_mass, tf.float64)
masses = tf.broadcast_to(mass, shape=(n_events,))
mass = tnp.asarray(mass, tnp.float64)
width = tnp.asarray(width, tnp.float64)
min_mass = tnp.asarray(min_mass, tnp.float64)
max_mass = tnp.asarray(max_mass, tnp.float64)
masses = tnp.broadcast_to(mass, shape=(n_events,))
if kstar_width > 0:
masses = tfp.distributions.TruncatedNormal(
loc=masses, scale=width, low=min_mass, high=max_mass
9 changes: 3 additions & 6 deletions tests/test_chain.py
@@ -6,17 +6,12 @@
# =============================================================================
"""Test decay chain tools."""

import os
import sys

import numpy as np
import pytest

from phasespace import GenParticle

sys.path.append(os.path.dirname(__file__))

from .helpers import decays # noqa: E402
from .helpers import decays, tf_only


def test_name_clashes():
@@ -93,6 +88,7 @@ def test_resonance_top():
kstar.generate(n_events=1)


@tf_only
def test_kstargamma():
"""Test B0 -> K*gamma."""
decay = decays.b0_to_kstar_gamma()
@@ -104,6 +100,7 @@ def test_kstargamma():
assert all(part.shape == (1000, 4) for part in particles.values())


@tf_only
def test_k1gamma():
"""Test B+ -> K1 (K*pi) gamma."""
decay = decays.bp_to_k1_kstar_pi_gamma()
7 changes: 1 addition & 6 deletions tests/test_generate.py
@@ -6,17 +6,12 @@
# =============================================================================
"""Basic dimensionality tests."""

import os
import sys

import numpy as np
import pytest

import phasespace

sys.path.append(os.path.dirname(__file__))

from .helpers import decays # noqa: E402
from .helpers import decays

B0_MASS = decays.B0_MASS
PION_MASS = decays.PION_MASS
17 changes: 10 additions & 7 deletions tests/test_physics.py
@@ -19,24 +19,25 @@
matplotlib.use("TkAgg")

import os
import sys

import matplotlib.pyplot as plt
import tensorflow as tf
import uproot
import uproot4

from phasespace import phasespace
from phasespace.backend import tnp

sys.path.append(os.path.dirname(__file__))

from .helpers import decays, rapidsim # noqa: E402
from .helpers.plotting import make_norm_histo # noqa: E402
from .helpers import decays, rapidsim, tf_only
from .helpers.plotting import make_norm_histo

BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
PLOT_DIR = os.path.join(BASE_PATH, "tests", "plots")


def setup_method():
import tensorflow as tf

phasespace.GenParticle._sess.close()
tf.compat.v1.reset_default_graph()

@@ -83,15 +84,15 @@ def create_ref_histos(n_pions):
def run_test(n_particles, test_prefix):
first_run_n_events = 100
main_run_n_events = 100000
n_events = tf.Variable(initial_value=first_run_n_events, dtype=tf.int64)
n_events = tnp.asarray(first_run_n_events, dtype=tnp.int64)

decay = phasespace.nbody_decay(decays.B0_MASS, [decays.PION_MASS] * n_particles)
generate = decay.generate(n_events)
weights1, _ = generate # only generate to test change in n_events
assert len(weights1) == first_run_n_events

# change n_events and run again
n_events.assign(main_run_n_events)
n_events = tnp.asarray(main_run_n_events, dtype=tnp.int64)
weights, particles = decay.generate(n_events)
parts = np.concatenate(
[particles[f"p_{part_num}"] for part_num in range(n_particles)], axis=1
@@ -268,6 +269,7 @@ def test_kstargamma_kstarnonresonant_lhc():
assert np.all(p_values > 0.05)


@tf_only
def test_kstargamma_resonant_at_rest():
"""Test B0 -> K* gamma physics with Gaussian mass for K*.
@@ -382,6 +384,7 @@ def test_k1gamma_kstarnonresonant_lhc():
assert np.all(p_values > 0.05)


@tf_only
def test_k1gamma_resonant_at_rest():
"""Test B0 -> K1 (->K*pi) gamma physics.
24 changes: 18 additions & 6 deletions tests/test_random.py
@@ -1,26 +1,38 @@
import numpy as np
import pytest
import tensorflow as tf

import phasespace as phsp

from .helpers import tf_only


def create_from_seed_input():
import tensorflow as tf

return tf.random.Generator.from_seed(15)


@tf_only
@pytest.mark.parametrize(
"seed", [lambda: 15, lambda: tf.random.Generator.from_seed(15)]
"seed",
[
lambda: 15,
create_from_seed_input,
],
)
def test_get_rng(seed):
rng1 = phsp.random.get_rng(seed())
rng2 = phsp.random.get_rng(seed())
rnd1_seeded = rng1.uniform_full_int(shape=(100,))
rnd2_seeded = rng2.uniform_full_int(shape=(100,))
rnd1_seeded = rng1.uniform(shape=(100,))
rnd2_seeded = rng2.uniform(shape=(100,))

rng3 = phsp.random.get_rng()
rng4 = phsp.random.get_rng(seed())
# advance rng4 by one step
_ = rng4.split(1)

rnd3 = rng3.uniform_full_int(shape=(100,))
rnd4 = rng4.uniform_full_int(shape=(100,))
rnd3 = rng3.uniform(shape=(100,))
rnd4 = rng4.uniform(shape=(100,))

np.testing.assert_array_equal(rnd1_seeded, rnd2_seeded)
assert not np.array_equal(rnd1_seeded, rnd3)