Skip to content

Commit

Permalink
Switch to field tuples for default fields in particle_trajectories (#4768)
Browse files Browse the repository at this point in the history

Co-authored-by: Clément Robert <cr52@protonmail.com>
  • Loading branch information
mtryan83 and neutrinoceros authored Dec 25, 2023
1 parent 42eb142 commit d2088a1
Show file tree
Hide file tree
Showing 5 changed files with 154 additions and 79 deletions.
1 change: 1 addition & 0 deletions nose_ignores.txt
Original file line number Diff line number Diff line change
Expand Up @@ -36,3 +36,4 @@
--ignore-file=test_version\.py
--ignore-file=test_gadget_pytest\.py
--ignore-file=test_vr_orientation\.py
--ignore-file=test_particle_trajectories_pytest\.py
1 change: 1 addition & 0 deletions tests/tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,7 @@ other_tests:
- "--ignore-file=test_version\\.py"
- "--ignore-file=test_gadget_pytest\\.py"
- "--ignore-file=test_vr_orientation\\.py"
- "--ignore-file=test_particle_trajectories_pytest\\.py"
- "--exclude-test=yt.frontends.gdf.tests.test_outputs.TestGDF"
- "--exclude-test=yt.frontends.adaptahop.tests.test_outputs"
- "--exclude-test=yt.frontends.stream.tests.test_stream_particles.test_stream_non_cartesian_particles"
Expand Down
6 changes: 3 additions & 3 deletions yt/data_objects/particle_trajectories.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ def __init__(
raise YTIllDefinedParticleData(
"This dataset contains duplicate particle indices!"
) from e
self.field_data[field] = array_like_field(
self.field_data[fds[field]] = array_like_field(
dd_first, output_field.copy(), fds[field]
)
self.particle_fields.append(field)
Expand Down Expand Up @@ -339,8 +339,8 @@ def trajectory_from_index(self, index):
raise IndexError
fields = sorted(self.field_data.keys())
traj = {}
traj["particle_time"] = self.times
traj["particle_index"] = index
traj[(self.ptype, "particle_time")] = self.times
traj[(self.ptype, "particle_index")] = index
for field in fields:
traj[field] = self[field][mask, :][0]
return traj
Expand Down
76 changes: 0 additions & 76 deletions yt/data_objects/tests/test_particle_trajectories.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,9 @@
import glob
import os

import numpy as np
from numpy.testing import assert_raises

from yt.config import ytcfg
from yt.data_objects.particle_filters import particle_filter
from yt.data_objects.time_series import DatasetSeries
from yt.testing import fake_particle_ds
from yt.utilities.answer_testing.framework import GenericArrayTest, requires_ds
from yt.utilities.exceptions import YTIllDefinedParticleData


def setup():
Expand Down Expand Up @@ -69,73 +63,3 @@ def field_func(name):
return traj[field] # noqa: B023

yield GenericArrayTest(ds, field_func, args=[field])


def test_uniqueness():
    """Duplicate particle indices across a time series must raise
    YTIllDefinedParticleData when building trajectories."""
    num_particles = 2
    num_snapshots = 2
    # The modulo folds the ids so every id appears twice -> deliberately non-unique.
    duplicated_ids = np.arange(num_particles, dtype="int64") % (num_particles // 2)
    particle_data = {"particle_index": duplicated_ids}
    field_names = [
        "particle_position_x",
        "particle_position_y",
        "particle_position_z",
        "particle_index",
    ]
    allow_negative = [False, False, False, False]
    field_units = ["cm", "cm", "cm", "1"]

    snapshots = []
    for _ in range(num_snapshots):
        snapshots.append(
            fake_particle_ds(
                fields=field_names,
                negative=allow_negative,
                units=field_units,
                npart=num_particles,
                data=particle_data,
            )
        )
    ts = DatasetSeries(snapshots)

    assert_raises(YTIllDefinedParticleData, ts.particle_trajectories, [0])


def test_ptype():
    """Trajectories can be built for a filtered particle type whose ids are
    unique *within the filter*, even though they repeat in the raw data."""
    n_particles = 100
    fields = [
        "particle_position_x",
        "particle_position_y",
        "particle_position_z",
        "particle_index",
        "particle_dummy",
    ]
    negative = [False, False, False, False, False]
    units = ["cm", "cm", "cm", "1", "1"]

    # Setup filters on the 'particle_dummy' field, keeping only the first 50
    @particle_filter(name="dummy", requires=["particle_dummy"])
    def dummy(pfilter, data):
        return data[(pfilter.filtered_type, "particle_dummy")] <= n_particles // 2

    # Setup fake particle datasets with repeated ids. This should work because
    # the ids are unique among `dummy_particles` so let's test this
    data = {
        "particle_index": np.arange(n_particles) % (n_particles // 2),
        "particle_dummy": np.arange(n_particles),
    }
    all_ds = [
        fake_particle_ds(
            fields=fields, negative=negative, units=units, npart=n_particles, data=data
        )
    ]
    for ds in all_ds:
        ds.add_particle_filter("dummy")
    ts = DatasetSeries(all_ds)

    # Select all dummy particles
    # (leftover debug print of derived_field_list removed; the pytest port of
    # this test does not carry it either)
    ids = ts[0].all_data()["dummy", "particle_index"]

    # Build trajectories; success (no exception) is the assertion here
    ts.particle_trajectories(ids, ptype="dummy")
149 changes: 149 additions & 0 deletions yt/data_objects/tests/test_particle_trajectories_pytest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
import numpy as np
import pytest
from numpy.testing import assert_raises

from yt.data_objects.particle_filters import particle_filter
from yt.data_objects.time_series import DatasetSeries
from yt.testing import fake_particle_ds
from yt.utilities.exceptions import YTIllDefinedParticleData

# Default trajectory fields: particle positions for the "all" particle type.
pfields = [
    ("all", "particle_position_x"),
    ("all", "particle_position_y"),
    ("all", "particle_position_z"),
]
# Particle velocity field tuples.
# NOTE(review): vfields is not referenced anywhere in the visible tests —
# confirm it is used elsewhere or intended for future tests.
vfields = [
    ("all", "particle_velocity_x"),
    ("all", "particle_velocity_y"),
    ("all", "particle_velocity_z"),
]


@pytest.fixture
def particle_trajectories_test_dataset():
    """Build a two-snapshot DatasetSeries of fake particle data, including one
    non-default field (x velocity), for the trajectory tests below."""
    num_particles = 2
    num_steps = 2
    particle_ids = np.arange(num_particles, dtype="int64")
    particle_data = {"particle_index": particle_ids}
    field_names = [
        "particle_position_x",
        "particle_position_y",
        "particle_position_z",
        "particle_velocity_x",  # adding a non-default field
        "particle_index",
    ]
    allow_negative = [False, False, False, True, False]
    field_units = ["cm", "cm", "cm", "cm/s", "1"]

    snapshots = [
        fake_particle_ds(
            fields=field_names,
            negative=allow_negative,
            units=field_units,
            npart=num_particles,
            data=particle_data,
        )
        for _ in range(num_steps)
    ]
    return DatasetSeries(snapshots)


def test_uniqueness():
    """Duplicate particle indices must make particle_trajectories raise
    YTIllDefinedParticleData."""
    n_particles = 2
    n_steps = 2
    # Fold the ids with modulo so each one appears twice: deliberately ill-defined.
    ids = np.arange(n_particles, dtype="int64") % (n_particles // 2)
    data = {"particle_index": ids}
    fields = [
        "particle_position_x",
        "particle_position_y",
        "particle_position_z",
        "particle_index",
    ]
    negative = [False, False, False, False]
    units = ["cm", "cm", "cm", "1"]

    ts = DatasetSeries(
        [
            fake_particle_ds(
                fields=fields,
                negative=negative,
                units=units,
                npart=n_particles,
                data=data,
            )
            for _ in range(n_steps)
        ]
    )

    # pytest-native idiom; this file already imports pytest, so there is no
    # reason to reach for numpy.testing.assert_raises here.
    with pytest.raises(YTIllDefinedParticleData):
        ts.particle_trajectories([0])


def test_ptype():
    """Trajectories work for a filtered particle type whose ids are unique
    within the filter even though they repeat in the unfiltered data."""
    total = 100
    field_list = [
        "particle_position_x",
        "particle_position_y",
        "particle_position_z",
        "particle_index",
        "particle_dummy",
    ]
    neg_flags = [False, False, False, False, False]
    unit_list = ["cm", "cm", "cm", "1", "1"]

    # Setup filters on the 'particle_dummy' field, keeping only the first 50
    @particle_filter(name="dummy", requires=["particle_dummy"])
    def dummy(pfilter, data):
        return data[(pfilter.filtered_type, "particle_dummy")] <= total // 2

    # Setup fake particle datasets with repeated ids. This should work because
    # the ids are unique among `dummy_particles` so let's test this
    particle_data = {
        "particle_index": np.arange(total) % (total // 2),
        "particle_dummy": np.arange(total),
    }
    base_ds = fake_particle_ds(
        fields=field_list,
        negative=neg_flags,
        units=unit_list,
        npart=total,
        data=particle_data,
    )
    base_ds.add_particle_filter("dummy")
    ts = DatasetSeries([base_ds])

    # Select all dummy particles
    ids = ts[0].all_data()["dummy", "particle_index"]

    # Build trajectories; success (no exception) is the assertion here
    ts.particle_trajectories(ids, ptype="dummy")


@pytest.mark.parametrize("ptype", [None, "io"])
def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
    """Every key in field_data must be a (ptype, fname) tuple with the
    requested (or default) particle type and a known default field name."""
    series = particle_trajectories_test_dataset
    ids = series[0].all_data()[("all", "particle_index")]
    trajs = series.particle_trajectories(ids, ptype=ptype, suppress_logging=True)
    expected = ptype if ptype else "all"  # ptype defaults to "all"
    for key in trajs.field_data.keys():
        assert isinstance(key, tuple), f"Expected key to be tuple, received {type(key)}"
        ftype, fname = key
        assert (
            ftype == expected
        ), f"Default field type ({ftype}) does not match expected ({expected})"
        assert ("all", fname) in pfields, f"Unexpected field: {fname}"


@pytest.mark.parametrize("ptype", [None, "io"])
def test_time_and_index(particle_trajectories_test_dataset, ptype):
    """trajectory_from_index must key particle_time/particle_index by
    (ptype, fname) tuples, never by bare field-name strings."""
    series = particle_trajectories_test_dataset
    ids = series[0].all_data()[("all", "particle_index")]
    trajs = series.particle_trajectories(ids, ptype=ptype, suppress_logging=True)
    expected = ptype if ptype else "all"  # ptype defaults to "all"
    traj = trajs.trajectory_from_index(1)
    for fname in ("particle_time", "particle_index"):
        assert (expected, fname) in traj.keys(), f"Missing ({expected},{fname})"
        assert fname not in traj.keys(), f"{fname} present as bare string"

0 comments on commit d2088a1

Please sign in to comment.