Skip to content

Commit

Permalink
Merge pull request #4696 from jzuhone/extend_sph_stream
Browse files Browse the repository at this point in the history
  • Loading branch information
neutrinoceros authored Nov 1, 2023
2 parents 8a43695 + d105afd commit 7026229
Show file tree
Hide file tree
Showing 3 changed files with 115 additions and 40 deletions.
94 changes: 58 additions & 36 deletions doc/source/examining/Loading_Generic_Particle_Data.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,10 @@
"outputs": [],
"source": [
"data = {\n",
" \"particle_position_x\": ppx,\n",
" \"particle_position_y\": ppy,\n",
" \"particle_position_z\": ppz,\n",
" \"particle_mass\": ppm,\n",
" (\"io\", \"particle_position_x\"): ppx,\n",
" (\"io\", \"particle_position_y\"): ppy,\n",
" (\"io\", \"particle_position_z\"): ppz,\n",
" (\"io\", \"particle_mass\"): ppm,\n",
"}"
]
},
Expand Down Expand Up @@ -96,7 +96,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"This new dataset acts like any other yt `Dataset` object, and can be used to create data objects and query for yt fields. This example shows how to access \"deposit\" fields:"
"This new dataset acts like any other yt `Dataset` object, and can be used to create data objects and query for yt fields."
]
},
{
Expand All @@ -108,25 +108,15 @@
"outputs": [],
"source": [
"ad = ds.all_data()\n",
"\n",
"# This is generated with \"cloud-in-cell\" interpolation.\n",
"cic_density = ad[\"deposit\", \"all_cic\"]\n",
"\n",
"# These three are based on nearest-neighbor cell deposition\n",
"nn_density = ad[\"deposit\", \"all_density\"]\n",
"nn_deposited_mass = ad[\"deposit\", \"all_mass\"]\n",
"particle_count_per_cell = ad[\"deposit\", \"all_count\"]"
"print(ad.mean((\"io\", \"particle_position_x\")))\n",
"print(ad.sum((\"io\", \"particle_mass\")))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"cell_type": "markdown",
"metadata": {},
"source": [
"ds.field_list"
"We can project the particle mass field like so:"
]
},
{
Expand All @@ -137,17 +127,8 @@
},
"outputs": [],
"source": [
"ds.derived_field_list"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"slc = yt.SlicePlot(ds, 2, (\"deposit\", \"all_cic\"))\n",
"slc.set_width((8, \"Mpc\"))"
"prj = yt.ParticleProjectionPlot(ds, \"z\", (\"io\", \"particle_mass\"))\n",
"prj.set_width((8, \"Mpc\"))"
]
},
{
Expand All @@ -163,16 +144,45 @@
"metadata": {},
"outputs": [],
"source": [
"n_gas_particles = 1000000\n",
"n_star_particles = 1000000\n",
"n_dm_particles = 2000000\n",
"\n",
"ppxg, ppyg, ppzg = 1e6 * np.random.normal(size=[3, n_gas_particles])\n",
"ppmg = np.ones(n_gas_particles)\n",
"hsml = 10000 * np.ones(n_gas_particles)\n",
"dens = 2.0e-4 * np.ones(n_gas_particles)\n",
"\n",
"ppxd, ppyd, ppzd = 1e6 * np.random.normal(size=[3, n_dm_particles])\n",
"ppmd = np.ones(n_dm_particles)\n",
"\n",
"ppxs, ppys, ppzs = 5e5 * np.random.normal(size=[3, n_star_particles])\n",
"ppms = 0.1 * np.ones(n_star_particles)\n",
"\n",
"bbox = 1.1 * np.array(\n",
" [\n",
" [\n",
" min(ppxg.min(), ppxd.min(), ppxs.min()),\n",
" max(ppxg.max(), ppxd.max(), ppxs.max()),\n",
" ],\n",
" [\n",
" min(ppyg.min(), ppyd.min(), ppys.min()),\n",
" max(ppyg.max(), ppyd.max(), ppys.max()),\n",
" ],\n",
" [\n",
" min(ppzg.min(), ppzd.min(), ppzs.min()),\n",
" max(ppzg.max(), ppzd.max(), ppzs.max()),\n",
" ],\n",
" ]\n",
")\n",
"\n",
"data2 = {\n",
" (\"gas\", \"particle_position_x\"): ppxg,\n",
" (\"gas\", \"particle_position_y\"): ppyg,\n",
" (\"gas\", \"particle_position_z\"): ppzg,\n",
" (\"gas\", \"particle_mass\"): ppmg,\n",
" (\"gas\", \"smoothing_length\"): hsml,\n",
" (\"gas\", \"density\"): dens,\n",
" (\"dm\", \"particle_position_x\"): ppxd,\n",
" (\"dm\", \"particle_position_y\"): ppyd,\n",
" (\"dm\", \"particle_position_z\"): ppzd,\n",
Expand All @@ -184,15 +194,25 @@
"}\n",
"\n",
"ds2 = yt.load_particles(\n",
" data2, length_unit=1.0 * parsec, mass_unit=1e8 * Msun, n_ref=256, bbox=bbox\n",
" data2, length_unit=1.0 * parsec, mass_unit=1e8 * Msun, bbox=bbox\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We now have separate `\"dm\"` and `\"star\"` particles, as well as their deposited fields:"
"We now have separate `\"gas\"`, `\"dm\"`, and `\"star\"` particles. Since the `\"gas\"` particles have `\"density\"` and `\"smoothing_length\"` fields, they are recognized as SPH particles:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ad = ds2.all_data()\n",
"c = np.array([ad.mean((\"gas\", ax)).to(\"code_length\") for ax in \"xyz\"])"
]
},
{
Expand All @@ -203,8 +223,10 @@
},
"outputs": [],
"source": [
"slc = yt.SlicePlot(ds2, 2, [(\"deposit\", \"dm_cic\"), (\"deposit\", \"star_cic\")])\n",
"slc.set_width((8, \"Mpc\"))"
"slc = yt.SlicePlot(ds2, \"z\", (\"gas\", \"density\"), center=c)\n",
"slc.set_zlim((\"gas\", \"density\"), 1e-19, 2.0e-18)\n",
"slc.set_width((4, \"Mpc\"))\n",
"slc.show()"
]
}
],
Expand All @@ -224,7 +246,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.1"
"version": "3.12.0"
}
},
"nbformat": 4,
Expand Down
12 changes: 8 additions & 4 deletions yt/frontends/stream/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -560,10 +560,14 @@ def __init__(
axis_order=axis_order,
)
fields = list(stream_handler.fields["stream_file"].keys())
# This is the current method of detecting SPH data.
# This should be made more flexible in the future.
if ("io", "density") in fields and ("io", "smoothing_length") in fields:
self._sph_ptypes = ("io",)
sph_ptypes = []
for ptype in self.particle_types:
if (ptype, "density") in fields and (ptype, "smoothing_length") in fields:
sph_ptypes.append(ptype)
if len(sph_ptypes) == 1:
self._sph_ptypes = tuple(sph_ptypes)
elif len(sph_ptypes) > 1:
raise ValueError("Multiple SPH particle types are currently not supported!")

def add_sph_fields(self, n_neighbors=32, kernel="cubic", sph_ptype="io"):
"""Add SPH fields for the specified particle type.
Expand Down
49 changes: 49 additions & 0 deletions yt/frontends/stream/tests/test_stream_particles.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,8 @@ def test_load_particles_types():
ds2 = load_particles(data2)
ds2.index

# We use set here because we don't care about the order and we just need
# the elements to be correct
assert set(ds2.particle_types) == {"all", "star", "dm", "nbody"}

dd = ds2.all_data()
Expand All @@ -218,6 +220,53 @@ def test_load_particles_types():
assert dd["all", f"particle_position_{ax}"].size == num_tot_particles


def test_load_particles_sph_types():
    """Verify SPH particle-type detection in ``load_particles``.

    A particle type is recognized as SPH when it provides both a
    ``density`` and a ``smoothing_length`` field.  Exactly one SPH type
    is accepted; a second one must raise a ``ValueError``.
    """
    num_particles = 10000

    def _make_ptype_fields(ptype, sph=False):
        # Build the position/velocity/mass fields common to every
        # particle type; the repeated dict literals in the original
        # version are collapsed into one comprehension.
        fields = {
            (ptype, f"particle_{kind}_{ax}"): np.random.random(size=num_particles)
            for kind in ("position", "velocity")
            for ax in "xyz"
        }
        fields[ptype, "particle_mass"] = np.ones(num_particles)
        if sph:
            # These two fields are what flags a particle type as SPH.
            fields[ptype, "density"] = np.ones(num_particles)
            fields[ptype, "smoothing_length"] = np.ones(num_particles)
        return fields

    data = {**_make_ptype_fields("gas", sph=True), **_make_ptype_fields("dm")}

    ds = load_particles(data)

    # Order of particle_types is irrelevant; only membership matters.
    assert set(ds.particle_types) == {"gas", "dm"}
    assert ds._sph_ptypes == ("gas",)

    # Adding a second SPH particle type is not supported and must be
    # rejected at load time.
    data.update(_make_ptype_fields("cr_gas", sph=True))

    with pytest.raises(
        ValueError, match="Multiple SPH particle types are currently not supported!"
    ):
        load_particles(data)


def test_load_particles_with_data_source():
ds1 = fake_particle_ds()

Expand Down

0 comments on commit 7026229

Please sign in to comment.