
update(Mf6Splitter): change how node mapping is stored and loaded #2465


Merged · 11 commits · Mar 14, 2025
66 changes: 66 additions & 0 deletions .docs/Notebooks/mf6_parallel_model_splitting_example.py
@@ -768,3 +768,69 @@ def string2geom(geostring, conversion=None):
sa = np.array(seg)
ax.plot(sa[:, 0], sa[:, 1], "b-")
# -

# ### Save node mapping data
#
# The `save_node_mapping()` method allows users to save an HDF5 representation of the model splitter's node mapping information, which can be reloaded later and used to reconstruct arrays

# +
filename = workspace / "node_mapping.hdf5"
mfsplit.save_node_mapping(filename)
# -
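
# The saved file is an ordinary HDF5 file, so its contents can be inspected with `h5py` if desired. The short sketch below simply walks the file and prints the name of every group and dataset it contains; the exact layout written by `save_node_mapping()` is an internal detail of the splitter rather than a documented schema, so treat this as a quick sanity check only

# +
import h5py

with h5py.File(filename, "r") as h5:
    # print every group/dataset name stored in the node mapping file
    h5.visit(print)
# -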

# ### Reloading node mapping data
#
# The `load_node_mapping()` method allows the user to instantiate an `Mf6Splitter` object from an HDF5 node mapping file for reconstructing output arrays

# +
mfs = Mf6Splitter.load_node_mapping(filename)
# -

# Reconstruct heads using the `Mf6Splitter` object we just created

# +
model_names = list(new_sim.model_names)
head_dict = {}
for modelname in model_names:
    mnum = int(modelname.split("_")[-1])
    head = new_sim.get_model(modelname).output.head().get_alldata()[-1]
    head_dict[mnum] = head

ra_heads = mfs.reconstruct_array(head_dict)
ra_watertable = flopy.utils.postprocessing.get_water_table(ra_heads)
# -

# +
with styles.USGSMap():
    fig, axs = plt.subplots(nrows=3, figsize=(8, 12))
    diff = ra_heads - heads
    hv = [ra_heads, heads, diff]
    titles = ["Multiple models", "Single model", "Multiple - single"]
    for idx, ax in enumerate(axs):
        ax.set_aspect("equal")
        ax.set_title(titles[idx])

        if idx < 2:
            levels = contours
            vmin = hmin
            vmax = hmax
        else:
            levels = None
            vmin = None
            vmax = None

        pmv = flopy.plot.PlotMapView(modelgrid=gwf.modelgrid, ax=ax, layer=0)
        h = pmv.plot_array(hv[idx], vmin=vmin, vmax=vmax)
        if levels is not None:
            c = pmv.contour_array(
                hv[idx], levels=levels, colors="white", linewidths=0.75, linestyles=":"
            )
            plt.clabel(c, fontsize=8)
        pmv.plot_inactive()
        plt.colorbar(h, ax=ax, shrink=0.5)

        ax.plot(bp[:, 0], bp[:, 1], "r-")
        for seg in segs:
            sa = np.array(seg)
            ax.plot(sa[:, 0], sa[:, 1], "b-")
# -
93 changes: 87 additions & 6 deletions autotest/test_model_splitter.py
@@ -204,10 +204,11 @@ def test_metis_splitting_with_lak_sfr(function_tmpdir):

@requires_exe("mf6")
@requires_pkg("pymetis")
def test_save_load_node_mapping(function_tmpdir):
@requires_pkg("h5py")
def test_save_load_node_mapping_structured(function_tmpdir):
    sim_path = get_example_data_path() / "mf6-freyberg"
    new_sim_path = function_tmpdir / "mf6-freyberg/split_model"
    json_file = new_sim_path / "node_map.json"
    hdf_file = new_sim_path / "node_map.hdf5"
    nparts = 5

    sim = MFSimulation.load(sim_ws=sim_path)
@@ -225,12 +226,11 @@ def test_save_load_node_mapping(function_tmpdir):
    new_sim.run_simulation()
    original_node_map = mfsplit._node_map

    mfsplit.save_node_mapping(json_file)
    mfsplit.save_node_mapping(hdf_file)

    new_sim2 = MFSimulation.load(sim_ws=new_sim_path)

    mfsplit2 = Mf6Splitter(new_sim2)
    mfsplit2.load_node_mapping(new_sim2, json_file)
    mfsplit2 = Mf6Splitter.load_node_mapping(hdf_file)
    saved_node_map = mfsplit2._node_map

    for k, v1 in original_node_map.items():
@@ -249,6 +249,88 @@ def test_save_load_node_mapping(function_tmpdir):
    np.testing.assert_allclose(new_heads, original_heads, err_msg=err_msg)


@requires_exe("mf6")
@requires_pkg("h5py")
def test_save_load_node_mapping_vertex(function_tmpdir):
    sim_path = get_example_data_path() / "mf6" / "test003_gwftri_disv"
    new_sim_path = function_tmpdir / "split_model"
    hdf_file = new_sim_path / "vertex_node_map.hdf5"
    sim = MFSimulation.load(sim_ws=sim_path)
    sim.set_sim_path(function_tmpdir)
    sim.write_simulation()
    sim.run_simulation()

    gwf = sim.get_model()
    modelgrid = gwf.modelgrid

    array = np.zeros((modelgrid.ncpl,), dtype=int)
    array[0:85] = 1

    mfsplit = Mf6Splitter(sim)
    new_sim = mfsplit.split_model(array)

    new_sim.set_sim_path(new_sim_path)
    new_sim.write_simulation()
    new_sim.run_simulation()
    mfsplit.save_node_mapping(hdf_file)

    original_heads = np.squeeze(gwf.output.head().get_alldata()[-1])

    ml0 = new_sim.get_model("gwf_1_0")
    ml1 = new_sim.get_model("gwf_1_1")
    heads0 = ml0.output.head().get_alldata()[-1]
    heads1 = ml1.output.head().get_alldata()[-1]

    mfsplit2 = Mf6Splitter.load_node_mapping(hdf_file)

    new_heads = mfsplit2.reconstruct_array({0: heads0, 1: heads1})

    err_msg = "Heads from original and split models do not match"
    np.testing.assert_allclose(
        new_heads, original_heads, rtol=0.002, atol=0.01, err_msg=err_msg
    )


@requires_exe("mf6")
@requires_pkg("h5py")
def test_save_load_node_mapping_unstructured(function_tmpdir):
    sim_path = get_example_data_path() / "mf6" / "test006_gwf3"
    new_sim_path = function_tmpdir / "split_model"
    hdf_file = new_sim_path / "unstruct_node_mapping.hdf5"

    sim = MFSimulation.load(sim_ws=sim_path)
    sim.set_sim_path(function_tmpdir)
    sim.write_simulation()
    sim.run_simulation()

    gwf = sim.get_model()
    modelgrid = gwf.modelgrid

    array = np.zeros((modelgrid.nnodes,), dtype=int)
    array[65:] = 1

    mfsplit = Mf6Splitter(sim)
    new_sim = mfsplit.split_model(array)

    new_sim.set_sim_path(new_sim_path)
    new_sim.write_simulation()
    new_sim.run_simulation()
    mfsplit.save_node_mapping(hdf_file)

    original_heads = np.squeeze(gwf.output.head().get_alldata()[-1])

    ml0 = new_sim.get_model("gwf_1_0")
    ml1 = new_sim.get_model("gwf_1_1")
    heads0 = ml0.output.head().get_alldata()[-1]
    heads1 = ml1.output.head().get_alldata()[-1]

    mfsplit2 = Mf6Splitter.load_node_mapping(hdf_file)
    new_heads = mfsplit2.reconstruct_array({0: heads0, 1: heads1})

    err_msg = "Heads from original and split models do not match"
    np.testing.assert_allclose(new_heads, original_heads, err_msg=err_msg)


def test_control_records(function_tmpdir):
    nrow = 10
    ncol = 10
@@ -847,7 +929,6 @@ def test_unstructured_complex_disu(function_tmpdir):
        raise AssertionError("Reconstructed head results outside of tolerance")


@pytest.mark.slow
@requires_exe("mf6")
@requires_pkg("pymetis", "scipy")
def test_multi_model(function_tmpdir):
1 change: 1 addition & 0 deletions etc/environment.yml
@@ -54,3 +54,4 @@ dependencies:
- shapely>=2.0
- vtk
- xmipy
- h5py