Commit

Merge pull request #263 from smash-transport/sass/rename_particle_functions

Sass/rename particle functions
nilssass authored Aug 7, 2024
2 parents dbefd2b + 95d87b2 commit 69412dc
Showing 25 changed files with 339 additions and 140 deletions.
13 changes: 13 additions & 0 deletions CHANGELOG.md
@@ -28,6 +28,19 @@ Date:

### Changed
* Filter: Perform a general cleanup to reduce code duplication
* Particle: Rename several methods for a more intuitive naming scheme (a brief usage sketch follows the table). The renamed methods are:
| Old Method Name | New Method Name |
|-------------------------------------|-----------------------------|
| momentum_rapidity_Y() | rapidity() |
| spatial_rapidity() | spacetime_rapidity() |
| spatial_rapidity_cut() | spacetime_rapidity_cut() |
| pt_abs() | pT_abs() |
| pt_cut() | pT_cut() |
| compute_mass_from_energy_momentum() | mass_from_energy_momentum() |
| compute_charge_from_pdg() | charge_from_pdg() |
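
A minimal before/after sketch of the rename; the `particle` object itself is hypothetical, only the method names come from the table above:

```python
# `particle` stands for any sparkx Particle instance; its construction is not
# part of this change and is assumed here.
y      = particle.rapidity()                   # was: particle.momentum_rapidity_Y()
eta_s  = particle.spacetime_rapidity()         # was: particle.spatial_rapidity()
pT     = particle.pT_abs()                     # was: particle.pt_abs()
mass   = particle.mass_from_energy_momentum()  # was: particle.compute_mass_from_energy_momentum()
charge = particle.charge_from_pdg()            # was: particle.compute_charge_from_pdg()
```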

### Added
* Tests: Add HelperFunctions.py that contains free utility functions used in the tests and add corresponding tests.

## v1.3.0-Newton
Date: 2024-07-25
2 changes: 1 addition & 1 deletion docs/source/classes/Jetscape/index.rst
@@ -17,7 +17,7 @@ Jetscape
.. automethod:: Jetscape.charged_particles
.. automethod:: Jetscape.uncharged_particles
.. automethod:: Jetscape.strange_particles
.. automethod:: Jetscape.pt_cut
.. automethod:: Jetscape.pT_cut
.. automethod:: Jetscape.rapidity_cut
.. automethod:: Jetscape.pseudorapidity_cut
.. automethod:: Jetscape.multiplicity_cut
4 changes: 2 additions & 2 deletions docs/source/classes/Oscar/index.rst
@@ -19,9 +19,9 @@ Oscar
.. automethod:: Oscar.charged_particles
.. automethod:: Oscar.uncharged_particles
.. automethod:: Oscar.strange_particles
.. automethod:: Oscar.pt_cut
.. automethod:: Oscar.pT_cut
.. automethod:: Oscar.rapidity_cut
.. automethod:: Oscar.pseudorapidity_cut
.. automethod:: Oscar.spatial_rapidity_cut
.. automethod:: Oscar.spacetime_rapidity_cut
.. automethod:: Oscar.multiplicity_cut
.. automethod:: Oscar.print_particle_lists_to_file
8 changes: 4 additions & 4 deletions docs/source/classes/Particle/index.rst
@@ -8,15 +8,15 @@ Particle

.. automethod:: Particle.print_particle
.. automethod:: Particle.angular_momentum
.. automethod:: Particle.momentum_rapidity_Y
.. automethod:: Particle.rapidity
.. automethod:: Particle.p_abs
.. automethod:: Particle.pt_abs
.. automethod:: Particle.pT_abs
.. automethod:: Particle.phi
.. automethod:: Particle.theta
.. automethod:: Particle.pseudorapidity
.. automethod:: Particle.spatial_rapidity
.. automethod:: Particle.spacetime_rapidity
.. automethod:: Particle.proper_time
.. automethod:: Particle.compute_mass_from_energy_momentum
.. automethod:: Particle.mass_from_energy_momentum
.. automethod:: Particle.compute_charge_from_pdg
.. automethod:: Particle.mT
.. automethod:: Particle.is_meson
1 change: 0 additions & 1 deletion src/sparkx/CentralityClasses.py
@@ -177,7 +177,6 @@ def __create_centrality_classes(self) -> None:

MinRecord = int(number_events / 4 * self.centrality_bins_[0] / 100.0)
for i in range(1, len(self.centrality_bins_)):

MaxRecord = int(
number_events / 4 * self.centrality_bins_[i] / 100.0
)
2 changes: 1 addition & 1 deletion src/sparkx/EventCharacteristics.py
@@ -630,7 +630,7 @@ def generate_eBQS_densities_Minkowski_from_OSCAR_IC(
"n_sigma_x, n_sigma_y, n_sigma_z must be positive float or int"
)
if (IC_info is not None) and not isinstance(IC_info, str):
warnings.warn("The given IC_info is not a string")
raise TypeError("The given IC_info is not a string")
if not isinstance(output_filename, str):
raise TypeError("output_filename must be a string")
if self.has_lattice_:
30 changes: 15 additions & 15 deletions src/sparkx/Filter.py
@@ -451,7 +451,7 @@ def spacetime_cut(particle_list, dim, cut_value_tuple):
return updated_particle_list


def pt_cut(particle_list, cut_value_tuple):
def pT_cut(particle_list, cut_value_tuple):
"""
Apply pT cut to all events by passing an acceptance range via
:code:`cut_value_tuple`. All particles outside this range will
@@ -512,8 +512,8 @@ def pt_cut(particle_list, cut_value_tuple):
elem
for elem in particle_list[i]
if (
lim_min <= elem.pt_abs() <= lim_max
and not np.isnan(elem.pt_abs())
lim_min <= elem.pT_abs() <= lim_max
and not np.isnan(elem.pT_abs())
)
]
updated_particle_list.append(particle_list_tmp)
@@ -626,8 +626,8 @@ def rapidity_cut(particle_list, cut_value):
elem
for elem in particle_list[i]
if (
lim_min <= elem.momentum_rapidity_Y() <= lim_max
and not np.isnan(elem.momentum_rapidity_Y())
lim_min <= elem.rapidity() <= lim_max
and not np.isnan(elem.rapidity())
)
]
updated_particle_list.append(particle_list_tmp)
@@ -642,8 +642,8 @@ def rapidity_cut(particle_list, cut_value):
elem
for elem in particle_list[i]
if (
-limit <= elem.momentum_rapidity_Y() <= limit
and not np.isnan(elem.momentum_rapidity_Y())
-limit <= elem.rapidity() <= limit
and not np.isnan(elem.rapidity())
)
]
updated_particle_list.append(particle_list_tmp)
@@ -726,10 +726,10 @@ def pseudorapidity_cut(particle_list, cut_value):
return updated_particle_list


def spatial_rapidity_cut(particle_list, cut_value):
def spacetime_rapidity_cut(particle_list, cut_value):
"""
Apply spatial rapidity (space-time rapidity) cut to all events and
remove all particles with spatial rapidity not complying with cut_value.
Apply space-time rapidity cut to all events and remove all particles with
space-time rapidity not complying with cut_value.
Parameters
----------
@@ -743,7 +743,7 @@ def spatial_rapidity_cut(particle_list, cut_value):
in [-1.0, 1.0] are kept.
cut_value : tuple
To specify an asymmetric acceptance range for the spatial rapidity
To specify an asymmetric acceptance range for the space-time rapidity
of particles, pass a tuple (cut_min, cut_max)
Returns
@@ -764,8 +764,8 @@ def spatial_rapidity_cut(particle_list, cut_value):
elem
for elem in particle_list[i]
if (
lim_min <= elem.spatial_rapidity() <= lim_max
and not np.isnan(elem.spatial_rapidity())
lim_min <= elem.spacetime_rapidity() <= lim_max
and not np.isnan(elem.spacetime_rapidity())
)
]
updated_particle_list.append(particle_list_tmp)
@@ -780,8 +780,8 @@ def spatial_rapidity_cut(particle_list, cut_value):
elem
for elem in particle_list[i]
if (
-limit <= elem.spatial_rapidity() <= limit
and not np.isnan(elem.spatial_rapidity())
-limit <= elem.spacetime_rapidity() <= limit
and not np.isnan(elem.spacetime_rapidity())
)
]
updated_particle_list.append(particle_list_tmp)
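A brief usage sketch of the renamed filter functions with the signatures shown above; the import path is inferred from `src/sparkx/Filter.py`, and `events` (a list of per-event particle lists) is assumed:

```python
from sparkx.Filter import pT_cut, rapidity_cut, spacetime_rapidity_cut

# `events` is assumed to be a list of per-event lists of Particle objects.
events = pT_cut(events, (0.2, 3.0))                   # keep 0.2 <= pT <= 3.0
events = rapidity_cut(events, 0.5)                    # keep |y| <= 0.5
events = spacetime_rapidity_cut(events, (-1.0, 1.0))  # keep -1.0 <= eta_s <= 1.0
```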
1 change: 0 additions & 1 deletion src/sparkx/Histogram.py
@@ -166,7 +166,6 @@ def __init__(self, bin_boundaries):
self.systematic_error_ = np.asarray([np.zeros(num_bins)])

elif isinstance(bin_boundaries, (list, np.ndarray)):

self.number_of_bins_ = len(bin_boundaries) - 1
self.bin_edges_ = np.asarray(bin_boundaries)
self.histograms_ = np.asarray([np.zeros(self.number_of_bins_)])
16 changes: 8 additions & 8 deletions src/sparkx/Jetscape.py
@@ -107,7 +107,7 @@ class Jetscape:
Keep strange particles only
particle_status:
Keep only particles with a given status flag
pt_cut:
pT_cut:
Apply pT cut to all particles
rapidity_cut:
Apply rapidity cut to all particles
@@ -402,8 +402,8 @@ def __apply_kwargs_filters(self, event, filters_dict):
event = lower_event_energy_cut(
event, filters_dict["lower_event_energy_cut"]
)
elif i == "pt_cut":
event = pt_cut(event, filters_dict["pt_cut"])
elif i == "pT_cut":
event = pT_cut(event, filters_dict["pT_cut"])
elif i == "mT_cut":
event = mT_cut(event, filters_dict["mT_cut"])
elif i == "rapidity_cut":
@@ -412,9 +412,9 @@ def __apply_kwargs_filters(self, event, filters_dict):
event = pseudorapidity_cut(
event, filters_dict["pseudorapidity_cut"]
)
elif i == "spatial_rapidity_cut":
event = spatial_rapidity_cut(
event, filters_dict["spatial_rapidity_cut"]
elif i == "spacetime_rapidity_cut":
event = spacetime_rapidity_cut(
event, filters_dict["spacetime_rapidity_cut"]
)
elif i == "multiplicity_cut":
event = multiplicity_cut(
@@ -761,7 +761,7 @@ def lower_event_energy_cut(self, minimum_event_energy):

return self

def pt_cut(self, cut_value_tuple):
def pT_cut(self, cut_value_tuple):
"""
Apply transverse momentum cut to all events by passing an acceptance
range via :code:`cut_value_tuple`. All particles outside this range will
@@ -781,7 +781,7 @@ def pt_cut(self, cut_value_tuple):
Containing only particles complying with the transverse momentum
cut for all events
"""
self.particle_list_ = pt_cut(self.particle_list_, cut_value_tuple)
self.particle_list_ = pT_cut(self.particle_list_, cut_value_tuple)
self.__update_num_output_per_event_after_filter()

return self
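
A hedged usage sketch of the renamed Jetscape interface; the import path and constructor argument are assumptions, while the method names, signatures, and keyword-filter keys come from this diff:

```python
from sparkx.Jetscape import Jetscape   # import path inferred from src/sparkx/Jetscape.py

# Constructor argument is a placeholder; only the cut calls reflect this PR.
jetscape = Jetscape("particle_lists.dat")
jetscape.pT_cut((0.5, 2.0))   # was: pt_cut; returns self, so cuts can be chained
jetscape.rapidity_cut(1.0)    # unchanged by this PR

# Keyword-filter keys after the rename, as handled in __apply_kwargs_filters
# (how the dictionary is passed to Jetscape is assumed).
filters = {"pT_cut": (0.5, 2.0), "spacetime_rapidity_cut": 1.0}
```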
12 changes: 11 additions & 1 deletion src/sparkx/Lattice3D.py
@@ -1231,7 +1231,17 @@ def interpolate_to_lattice_new_extent(
for i, x in enumerate(x_new):
for j, y in enumerate(y_new):
for k, z in enumerate(z_new):
value = self.interpolate_value(x, y, z)
if (
x < self.x_min_
or x > self.x_max_
or y < self.y_min_
or y > self.y_max_
or z < self.z_min_
or z > self.z_max_
):
value = 0
else:
value = self.interpolate_value(x, y, z)
new_lattice.set_value_by_index(i, j, k, value)

return new_lattice
3 changes: 1 addition & 2 deletions src/sparkx/MultiParticlePtCorrelations.py
@@ -83,7 +83,6 @@ class MultiParticlePtCorrelations:
"""

def __init__(self, max_order: int) -> None:

self.max_order = max_order
# Check if max_order is an integer
if not isinstance(self.max_order, int):
@@ -125,7 +124,7 @@ def _P_W_k(
# if particle.weight is np.nan, then set it to 1
if np.isnan(particle.weight):
particle.weight = 1.0
Pk[k] += (particle.weight * particle.pt_abs()) ** (k + 1)
Pk[k] += (particle.weight * particle.pT_abs()) ** (k + 1)
Wk[k] += particle.weight ** (k + 1)
return (Pk, Wk)

30 changes: 15 additions & 15 deletions src/sparkx/Oscar.py
@@ -117,14 +117,14 @@ class Oscar:
Keep strange particles only
spacetime_cut:
Apply spacetime cut to all particles
pt_cut:
pT_cut:
Apply pT cut to all particles
rapidity_cut:
Apply rapidity cut to all particles
pseudorapidity_cut:
Apply pseudorapidity cut to all particles
spatial_rapidity_cut:
Apply spatial rapidity (space-time rapidity) cut to all particles
spacetime_rapidity_cut:
Apply space-time rapidity cut to all particles
multiplicity_cut:
Apply multiplicity cut to all particles
print_particle_lists_to_file:
@@ -465,8 +465,8 @@ def __apply_kwargs_filters(self, event, filters_dict):
filters_dict["spacetime_cut"][0],
filters_dict["spacetime_cut"][1],
)
elif i == "pt_cut":
event = pt_cut(event, filters_dict["pt_cut"])
elif i == "pT_cut":
event = pT_cut(event, filters_dict["pT_cut"])
elif i == "mT_cut":
event = mT_cut(event, filters_dict["mT_cut"])
elif i == "rapidity_cut":
@@ -475,9 +475,9 @@ def __apply_kwargs_filters(self, event, filters_dict):
event = pseudorapidity_cut(
event, filters_dict["pseudorapidity_cut"]
)
elif i == "spatial_rapidity_cut":
event = spatial_rapidity_cut(
event, filters_dict["spatial_rapidity_cut"]
elif i == "spacetime_rapidity_cut":
event = spacetime_rapidity_cut(
event, filters_dict["spacetime_rapidity_cut"]
)
elif i == "multiplicity_cut":
event = multiplicity_cut(
@@ -956,7 +956,7 @@ def spacetime_cut(self, dim, cut_value_tuple):

return self

def pt_cut(self, cut_value_tuple):
def pT_cut(self, cut_value_tuple):
"""
Apply transverse momentum cut to all events by passing an acceptance
range via :code:`cut_value_tuple`. All particles outside this range will
@@ -977,7 +977,7 @@ def pt_cut(self, cut_value_tuple):
cut for all events
"""

self.particle_list_ = pt_cut(self.particle_list_, cut_value_tuple)
self.particle_list_ = pT_cut(self.particle_list_, cut_value_tuple)
self.__update_num_output_per_event_after_filter()

return self
@@ -1066,10 +1066,10 @@ def pseudorapidity_cut(self, cut_value):

return self

def spatial_rapidity_cut(self, cut_value):
def spacetime_rapidity_cut(self, cut_value):
"""
Apply spatial rapidity (space-time rapidity) cut to all events and
remove all particles with spatial rapidity not complying with cut_value
Apply space-time rapidity cut to all events and remove all particles
with space-time rapidity not complying with cut_value
Parameters
----------
@@ -1086,11 +1086,11 @@ def spatial_rapidity_cut(self, cut_value):
Returns
-------
self : Oscar object
Containing only particles complying with the spatial rapidity cut
Containing only particles complying with the space-time rapidity cut
for all events
"""

self.particle_list_ = spatial_rapidity_cut(
self.particle_list_ = spacetime_rapidity_cut(
self.particle_list_, cut_value
)
self.__update_num_output_per_event_after_filter()
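
Finally, a hedged sketch of the renamed Oscar methods shown above; the import path and constructor argument are assumptions, while the method names and their tuple or float arguments follow the docstrings in this diff:

```python
from sparkx.Oscar import Oscar   # import path inferred from src/sparkx/Oscar.py

oscar = Oscar("particle_lists.oscar")   # placeholder path; constructor signature assumed
oscar.pT_cut((0.2, 3.0))                # was: pt_cut
oscar.spacetime_rapidity_cut(1.0)       # was: spatial_rapidity_cut; keeps |eta_s| <= 1.0
```

Since `pT_cut` returns `self`, the two cuts could also be chained in a single expression.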
