Python Example Scripts#
example_brownian.py#
1from datetime import timedelta
2
3import numpy as np
4import parcels
5import pytest
6
7ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
8
9
def mesh_conversion(mesh):
    """Return the factor that converts particle positions to metres.

    On a "spherical" mesh positions are in degrees, and one degree is
    60 nautical miles of 1852 m each; a "flat" mesh is already in metres.
    """
    if mesh == "spherical":
        return 1852.0 * 60
    return 1.0
12
13
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_brownian_example(mode, mesh, npart=3000):
    """Check that uniform-Kh diffusion reproduces Brownian-motion statistics.

    After one day of diffusion-only advection, particle positions should be
    zero-mean with standard deviation sqrt(2 * Kh * t) in each direction.
    """
    fieldset = parcels.FieldSet.from_data(
        {"U": 0, "V": 0}, {"lon": 0, "lat": 0}, mesh=mesh
    )

    # Set diffusion constants.
    kh_zonal = 100  # in m^2/s
    kh_meridional = 100  # in m^2/s

    # Create field of constant Kh_zonal and Kh_meridional
    fieldset.add_field(parcels.Field("Kh_zonal", kh_zonal, lon=0, lat=0, mesh=mesh))
    fieldset.add_field(
        parcels.Field("Kh_meridional", kh_meridional, lon=0, lat=0, mesh=mesh)
    )

    runtime = timedelta(days=1)

    # Fix the random seed so the statistical assertions below are reproducible.
    # (An earlier redundant seed(123456) call, immediately overridden by this
    # one, was removed.)
    parcels.ParcelsRandom.seed(1234)
    pset = parcels.ParticleSet(
        fieldset=fieldset, pclass=ptype[mode], lon=np.zeros(npart), lat=np.zeros(npart)
    )
    pset.execute(
        pset.Kernel(parcels.DiffusionUniformKh), runtime=runtime, dt=timedelta(hours=1)
    )

    # Expected spread of a 1D Brownian walk after time t: sqrt(2 * Kh * t).
    expected_std_x = np.sqrt(2 * kh_zonal * runtime.total_seconds())
    expected_std_y = np.sqrt(2 * kh_meridional * runtime.total_seconds())

    ys = pset.lat * mesh_conversion(mesh)
    xs = pset.lon * mesh_conversion(
        mesh
    )  # since near equator, we do not need to care about curvature effect

    tol = 250  # 250m tolerance
    assert np.allclose(np.std(xs), expected_std_x, atol=tol)
    assert np.allclose(np.std(ys), expected_std_y, atol=tol)
    assert np.allclose(np.mean(xs), 0, atol=tol)
    assert np.allclose(np.mean(ys), 0, atol=tol)
57
58
# Allow running this example directly as a script (with fewer particles).
if __name__ == "__main__":
    test_brownian_example("jit", "spherical", npart=2000)
example_dask_chunk_OCMs.py#
1import math
2from datetime import timedelta
3from glob import glob
4
5import dask
6import numpy as np
7import parcels
8import pytest
9from parcels.tools.statuscodes import DaskChunkingError
10
11ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
12
13
def compute_nemo_particle_advection(fieldset, mode):
    """Advect 20 particles for 4 days on the given NEMO fieldset and return the set.

    Particles start near (2.5E, 52N) with a small random latitude jitter; a
    periodic boundary kernel wraps them back into the domain.
    """
    npart = 20
    lonp = 2.5 * np.ones(npart)
    latp = [i for i in 52.0 + (-1e-3 + np.random.rand(npart) * 2.0 * 1e-3)]

    def periodicBC(particle, fieldSet, time):
        # NOTE(review): particle_dlon/particle_dlat appear to be displacement
        # variables injected into the kernel namespace by parcels' kernel
        # machinery (hence the noqa); confirm against the parcels kernel docs.
        if particle.lon > 15.0:
            particle_dlon -= 15.0  # noqa
        if particle.lon < 0:
            particle_dlon += 15.0
        if particle.lat > 60.0:
            particle_dlat -= 11.0  # noqa
        if particle.lat < 49.0:
            particle_dlat += 11.0

    pset = parcels.ParticleSet.from_list(fieldset, ptype[mode], lon=lonp, lat=latp)
    kernels = pset.Kernel(parcels.AdvectionRK4) + periodicBC
    pset.execute(kernels, runtime=timedelta(days=4), dt=timedelta(hours=6))
    return pset
33
34
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_nemo_3D(mode, chunk_mode):
    """Check dask chunking of a 3D NEMO fieldset under several chunk modes.

    ``False`` loads each field as a single chunk, ``auto`` lets dask pick a
    layout, ``specific`` requests explicit per-dimension chunks, and
    ``failsafe`` requests layouts that differ between U and V (dropping W).
    """
    # Shrink dask's target chunk size so "auto" actually produces many chunks.
    if chunk_mode in [
        "auto",
    ]:
        dask.config.set({"array.chunk-size": "256KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    wfiles = sorted(glob(f"{data_folder}/ORCA*W.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": vfiles},
        "W": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": wfiles},
    }
    variables = {"U": "uo", "V": "vo", "W": "wo"}
    dimensions = {
        "U": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "V": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "W": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
    }
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "U": {"depth": ("depthu", 75), "lat": ("y", 16), "lon": ("x", 16)},
            "V": {"depth": ("depthv", 75), "lat": ("y", 16), "lon": ("x", 16)},
            "W": {"depth": ("depthw", 75), "lat": ("y", 16), "lon": ("x", 16)},
        }
    elif chunk_mode == "failsafe":  # chunking time and but not depth
        # W is dropped here so U and V can request deliberately different
        # depth chunk sizes without a third field to reconcile.
        filenames = {
            "U": {
                "lon": mesh_mask,
                "lat": mesh_mask,
                "depth": wfiles[0],
                "data": ufiles,
            },
            "V": {
                "lon": mesh_mask,
                "lat": mesh_mask,
                "depth": wfiles[0],
                "data": vfiles,
            },
        }
        variables = {"U": "uo", "V": "vo"}
        dimensions = {
            "U": {
                "lon": "glamf",
                "lat": "gphif",
                "depth": "depthw",
                "time": "time_counter",
            },
            "V": {
                "lon": "glamf",
                "lat": "gphif",
                "depth": "depthw",
                "time": "time_counter",
            },
        }
        chs = {
            "U": {
                "time": ("time_counter", 1),
                "depth": ("depthu", 25),
                "lat": ("y", -1),
                "lon": ("x", -1),
            },
            "V": {
                "time": ("time_counter", 1),
                "depth": ("depthv", 75),
                "lat": ("y", -1),
                "lon": ("x", -1),
            },
        }

    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chs
    )

    compute_nemo_particle_advection(fieldset, mode)
    # Nemo sample file dimensions: depthu=75, y=201, x=151
    if chunk_mode != "failsafe":
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.W.grid._load_chunk)
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert (
            fieldset.gridset.size == 3
        )  # because three different grids in 'auto' mode
        assert len(fieldset.U.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        # Expected chunk count: ceil(dim / chunk) per dimension, multiplied.
        assert len(fieldset.U.grid._load_chunk) == (
            1
            * int(math.ceil(75.0 / 75.0))
            * int(math.ceil(201.0 / 16.0))
            * int(math.ceil(151.0 / 16.0))
        )
    elif chunk_mode == "failsafe":  # chunking time and depth but not lat and lon
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.U.grid._load_chunk) == (
            1
            * int(math.ceil(75.0 / 25.0))
            * int(math.ceil(201.0 / 171.0))
            * int(math.ceil(151.0 / 151.0))
        )
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) == (
            1
            * int(math.ceil(75.0 / 75.0))
            * int(math.ceil(201.0 / 171.0))
            * int(math.ceil(151.0 / 151.0))
        )
168
169
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_globcurrent_2D(mode, chunk_mode):
    """Check dask chunking of the 2D GlobCurrent data under several chunk modes.

    ``failsafe`` requests a chunk layout with wrong dimension names, which is
    expected to raise ``DaskChunkingError``; the other modes must produce a
    consistent chunk layout and advect a particle to the expected position.
    """
    if chunk_mode in [
        "auto",
    ]:
        dask.config.set({"array.chunk-size": "16KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    filenames = str(
        data_folder / "200201*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc"
    )
    variables = {
        "U": "eastward_eulerian_current_velocity",
        "V": "northward_eulerian_current_velocity",
    }
    dimensions = {"lat": "lat", "lon": "lon", "time": "time"}
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "U": {"lat": ("lat", 8), "lon": ("lon", 8)},
            "V": {"lat": ("lat", 8), "lon": ("lon", 8)},
        }
    elif chunk_mode == "failsafe":  # chunking time but not lat
        chs = {
            "U": {"time": ("time", 1), "lat": ("lat", 10), "lon": ("lon", -1)},
            "V": {"time": ("time", 1), "lat": ("lat", -1), "lon": ("lon", -1)},
        }

    fieldset = parcels.FieldSet.from_netcdf(
        filenames, variables, dimensions, chunksize=chs
    )
    try:
        pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
        pset.execute(
            parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
        )
    except DaskChunkingError:
        # we removed the failsafe, so now if all chunksize dimensions are incorrect, there is nothing left to chunk,
        # which raises an error saying so. This is the expected behaviour
        if chunk_mode == "failsafe":
            return
        # BUGFIX: previously the error was silently swallowed for all other
        # chunk modes, letting the test fall through to assertions on a
        # possibly-unbound `pset` (a confusing NameError). Surface the real
        # failure instead.
        raise
    # GlobCurrent sample file dimensions: time=UNLIMITED, lat=41, lon=81
    if chunk_mode != "failsafe":  # chunking time but not lat
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert len(fieldset.U.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        assert len(fieldset.U.grid._load_chunk) == (
            1 * int(math.ceil(41.0 / 8.0)) * int(math.ceil(81.0 / 8.0))
        )
    elif chunk_mode == "failsafe":  # chunking time but not lat
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
    assert abs(pset[0].lon - 23.8) < 1
    assert abs(pset[0].lat - -35.3) < 1
231
232
@pytest.mark.skip(
    reason="Started failing around #1644 (2024-08-08). Some change in chunking, inconsistent behavior."
)
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_pop(mode, chunk_mode):
    """Check dask chunking of POP model output, including a bad depth entry.

    In ``failsafe`` mode the depth chunk names a non-existent dimension
    ("wz"); the expected chunk counts below show the layout parcels falls
    back to in that case.
    """
    # Shrink dask's target chunk size so "auto" actually produces many chunks.
    if chunk_mode in [
        "auto",
    ]:
        dask.config.set({"array.chunk-size": "256KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("POPSouthernOcean_data")
    filenames = str(data_folder / "t.x1_SAMOC_flux.1690*.nc")
    variables = {"U": "UVEL", "V": "VVEL", "W": "WVEL"}
    # One explicit monthly timestamp per file (shape (6, 1)).
    timestamps = np.expand_dims(
        np.array([np.datetime64("2000-%.2d-01" % m) for m in range(1, 7)]), axis=1
    )
    dimensions = {"lon": "ULON", "lat": "ULAT", "depth": "w_dep"}
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {"lon": ("i", 8), "lat": ("j", 8), "depth": ("k", 3)}
    elif chunk_mode == "failsafe":  # here: bad depth entry
        chs = {"depth": ("wz", 3), "lat": ("j", 8), "lon": ("i", 8)}

    fieldset = parcels.FieldSet.from_pop(
        filenames, variables, dimensions, chunksize=chs, timestamps=timestamps
    )

    npart = 20
    lonp = 70.0 * np.ones(npart)
    latp = [i for i in -45.0 + (-0.25 + np.random.rand(npart) * 2.0 * 0.25)]
    pset = parcels.ParticleSet.from_list(fieldset, ptype[mode], lon=lonp, lat=latp)
    pset.execute(parcels.AdvectionRK4, runtime=timedelta(days=90), dt=timedelta(days=2))
    # POP sample file dimensions: k=21, j=60, i=60
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.W.grid._load_chunk)
    if chunk_mode is False:
        assert fieldset.gridset.size == 1
        assert len(fieldset.U.grid._load_chunk) == 1
        assert len(fieldset.V.grid._load_chunk) == 1
        assert len(fieldset.W.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert (
            fieldset.gridset.size == 3
        )  # because three different grids in 'auto' mode
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.W.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        assert fieldset.gridset.size == 1
        # Expected chunk count: ceil(dim / chunk) per dimension, multiplied.
        assert len(fieldset.U.grid._load_chunk) == (
            int(math.ceil(21.0 / 3.0))
            * int(math.ceil(60.0 / 8.0))
            * int(math.ceil(60.0 / 8.0))
        )
    elif chunk_mode == "failsafe":  # here: done a typo in the netcdf dimname field
        assert fieldset.gridset.size == 1
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.W.grid._load_chunk) != 1
        assert len(fieldset.U.grid._load_chunk) == (
            int(math.ceil(21.0 / 3.0))
            * int(math.ceil(60.0 / 8.0))
            * int(math.ceil(60.0 / 8.0))
        )
301
302
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_swash(mode, chunk_mode):
    """Check dask chunking of SWASH data with time-varying depth fields.

    U/V live on the z_u levels and W on the z levels; the depth fields are
    attached to the velocity fields after the FieldSet is built.
    """
    # Shrink dask's target chunk size so "auto" actually produces many chunks.
    if chunk_mode in [
        "auto",
    ]:
        dask.config.set({"array.chunk-size": "32KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("SWASH_data")
    filenames = str(data_folder / "field_*.nc")
    variables = {
        "U": "cross-shore velocity",
        "V": "along-shore velocity",
        "W": "vertical velocity",
        "depth_w": "time varying depth",
        "depth_u": "time varying depth_u",
    }
    # Depth dimension names are placeholders; actual depths are set below via
    # set_depth_from_field.
    dimensions = {
        "U": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "V": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "W": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "depth_w": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "depth_u": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
    }
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "U": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
            "V": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
            "W": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_w": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_u": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
        }
    elif (
        chunk_mode == "failsafe"
    ):  # here: incorrect matching between dimension names and their attachment to the NC-variables
        chs = {
            "U": {"depth": ("depth", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "V": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
            "W": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_w": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_u": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
        }
    fieldset = parcels.FieldSet.from_netcdf(
        filenames,
        variables,
        dimensions,
        mesh="flat",
        allow_time_extrapolation=True,
        chunksize=chs,
    )
    # Attach the time-varying depths: U and V share the z_u depths, W uses z.
    fieldset.U.set_depth_from_field(fieldset.depth_u)
    fieldset.V.set_depth_from_field(fieldset.depth_u)
    fieldset.W.set_depth_from_field(fieldset.depth_w)

    npart = 20
    lonp = [i for i in 9.5 + (-0.2 + np.random.rand(npart) * 2.0 * 0.2)]
    latp = [i for i in np.arange(start=12.3, stop=13.1, step=0.04)[0:20]]
    depthp = [
        -0.1,
    ] * npart
    pset = parcels.ParticleSet.from_list(
        fieldset, ptype[mode], lon=lonp, lat=latp, depth=depthp
    )
    pset.execute(
        parcels.AdvectionRK4,
        runtime=timedelta(seconds=0.2),
        dt=timedelta(seconds=0.005),
    )
    # SWASH sample file dimensions: t=1, z=7, z_u=6, y=21, x=51
    if chunk_mode not in [
        "failsafe",
    ]:
        assert len(fieldset.U.grid._load_chunk) == len(
            fieldset.V.grid._load_chunk
        ), f"U {fieldset.U.grid.chunk_info} vs V {fieldset.V.grid.chunk_info}"
    if chunk_mode not in ["failsafe", "auto"]:
        assert len(fieldset.U.grid._load_chunk) == len(
            fieldset.W.grid._load_chunk
        ), f"U {fieldset.U.grid.chunk_info} vs W {fieldset.W.grid.chunk_info}"
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    else:
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.W.grid._load_chunk) != 1
    if chunk_mode == "specific":
        # Expected chunk count: ceil(dim / chunk) per dimension, multiplied.
        assert len(fieldset.U.grid._load_chunk) == (
            1
            * int(math.ceil(6.0 / 6.0))
            * int(math.ceil(21.0 / 4.0))
            * int(math.ceil(51.0 / 4.0))
        )
        assert len(fieldset.V.grid._load_chunk) == (
            1
            * int(math.ceil(6.0 / 6.0))
            * int(math.ceil(21.0 / 4.0))
            * int(math.ceil(51.0 / 4.0))
        )
        assert len(fieldset.W.grid._load_chunk) == (
            1
            * int(math.ceil(7.0 / 7.0))
            * int(math.ceil(21.0 / 4.0))
            * int(math.ceil(51.0 / 4.0))
        )
411
412
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific"])
def test_ofam_3D(mode, chunk_mode):
    """Check dask chunking of OFAM data and the advected particle position.

    In "specific" mode the chunk layout may either match the uniform request
    or be corrected by parcels; both outcomes are accepted below.
    """
    if chunk_mode in [
        "auto",
    ]:
        dask.config.set({"array.chunk-size": "1024KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})

    data_folder = parcels.download_example_dataset("OFAM_example_data")
    filenames = {
        "U": f"{data_folder}/OFAM_simple_U.nc",
        "V": f"{data_folder}/OFAM_simple_V.nc",
    }
    variables = {"U": "u", "V": "v"}
    dimensions = {
        "lat": "yu_ocean",
        "lon": "xu_ocean",
        "depth": "st_ocean",
        "time": "Time",
    }

    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "lon": ("xu_ocean", 100),
            "lat": ("yu_ocean", 50),
            "depth": ("st_edges_ocean", 60),
            "time": ("Time", 1),
        }
    fieldset = parcels.FieldSet.from_netcdf(
        filenames, variables, dimensions, allow_time_extrapolation=True, chunksize=chs
    )

    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=180, lat=10, depth=2.5)
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=10), dt=timedelta(minutes=5)
    )
    # OFAM sample file dimensions: time=UNLIMITED, st_ocean=1, st_edges_ocean=52, lat=601, lon=2001
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert len(fieldset.U.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        # chunk_info layout: [..., n_lat_blocks, n_lon_blocks, block sizes...]
        numblocks = list(fieldset.U.grid.chunk_info[1:3])
        dblocks = 1
        # Sum the per-block sizes; they must cover the full lat/lon extents.
        vblocks = sum(fieldset.U.grid.chunk_info[3 : 3 + numblocks[0]])
        ublocks = sum(
            fieldset.U.grid.chunk_info[
                3 + numblocks[0] : 3 + numblocks[0] + numblocks[1]
            ]
        )
        matching_numblocks = ublocks == 2001 and vblocks == 601 and dblocks == 1
        matching_fields = fieldset.U.grid.chunk_info == fieldset.V.grid.chunk_info
        matching_uniformblocks = len(fieldset.U.grid._load_chunk) == (
            1
            * int(math.ceil(1.0 / 60.0))
            * int(math.ceil(601.0 / 50.0))
            * int(math.ceil(2001.0 / 100.0))
        )
        assert matching_uniformblocks or (matching_fields and matching_numblocks)
    assert abs(pset[0].lon - 173) < 1
    assert abs(pset[0].lat - 11) < 1
482
483
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize(
    "chunk_mode", [False, "auto", "specific_same", "specific_different"]
)
@pytest.mark.parametrize("using_add_field", [False, True])
def test_mitgcm(mode, chunk_mode, using_add_field):
    """Check dask chunking of MITgcm data, built in one go or via add_field.

    ``specific_same`` requests identical U/V chunk layouts (one shared grid),
    ``specific_different`` requests different ones (two grids).
    """
    # Shrink dask's target chunk size so "auto" actually produces many chunks.
    if chunk_mode in [
        "auto",
    ]:
        dask.config.set({"array.chunk-size": "512KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("MITgcm_example_data")
    filenames = {
        "U": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
        "V": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
    }
    variables = {"U": "UVEL", "V": "VVEL"}
    dimensions = {
        "U": {"lon": "XG", "lat": "YG", "time": "time"},
        "V": {"lon": "XG", "lat": "YG", "time": "time"},
    }

    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific_same":
        chs = {
            "U": {"lat": ("YC", 50), "lon": ("XG", 100)},
            "V": {"lat": ("YG", 50), "lon": ("XC", 100)},
        }
    elif chunk_mode == "specific_different":
        chs = {
            "U": {"lat": ("YC", 50), "lon": ("XG", 100)},
            "V": {"lat": ("YG", 40), "lon": ("XC", 100)},
        }
    if using_add_field:
        # Build U and V as separate FieldSets and merge them with add_field.
        if chs in [False, "auto"]:
            chs = {"U": chs, "V": chs}
        fieldset = parcels.FieldSet.from_mitgcm(
            filenames["U"],
            {"U": variables["U"]},
            dimensions["U"],
            mesh="flat",
            chunksize=chs["U"],
        )
        fieldset2 = parcels.FieldSet.from_mitgcm(
            filenames["V"],
            {"V": variables["V"]},
            dimensions["V"],
            mesh="flat",
            chunksize=chs["V"],
        )
        fieldset.add_field(fieldset2.V)
    else:
        fieldset = parcels.FieldSet.from_mitgcm(
            filenames, variables, dimensions, mesh="flat", chunksize=chs
        )

    pset = parcels.ParticleSet.from_list(
        fieldset=fieldset, pclass=ptype[mode], lon=5e5, lat=5e5
    )
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
    )
    # MITgcm sample file dimensions: time=10, XG=400, YG=200
    if chunk_mode != "specific_different":
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    if chunk_mode in [
        False,
    ]:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode in [
        "auto",
    ]:
        assert len(fieldset.U.grid._load_chunk) != 1
    elif "specific" in chunk_mode:
        # Expected chunk count: ceil(dim / chunk) per dimension, multiplied.
        assert len(fieldset.U.grid._load_chunk) == (
            1 * int(math.ceil(400.0 / 50.0)) * int(math.ceil(200.0 / 100.0))
        )
    if chunk_mode == "specific_same":
        assert fieldset.gridset.size == 1
    elif chunk_mode == "specific_different":
        assert fieldset.gridset.size == 2
    assert np.allclose(pset[0].lon, 5.27e5, atol=1e3)
569
570
@pytest.mark.parametrize("mode", ["jit"])
def test_diff_entry_dimensions_chunks(mode):
    """U and V chunk requests naming different depth dims must still yield one shared chunk layout."""
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    mesh_mask = f"{data_folder}/coordinates.nc"
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "data": vfiles},
    }
    variables = {"U": "uo", "V": "vo"}
    # Both fields share the same curvilinear coordinate/time dimension names.
    nemo_dims = {"lon": "glamf", "lat": "gphif", "time": "time_counter"}
    dimensions = {"U": dict(nemo_dims), "V": dict(nemo_dims)}
    # Depth chunk entries deliberately name different dims (depthu vs depthv).
    chs = {
        "U": {"depth": ("depthu", 75), "lat": ("y", 16), "lon": ("x", 16)},
        "V": {"depth": ("depthv", 75), "lat": ("y", 16), "lon": ("x", 16)},
    }
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chs
    )
    compute_nemo_particle_advection(fieldset, mode)
    # Nemo sample file dimensions: depthu=75, y=201, x=151
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
597
598
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_3d_2dfield_sampling(mode):
    """Sample a curvilinear and a rectilinear 2D field while advecting in a NEMO flow.

    Both sampled fields are set to all-ones, so after the run each particle's
    accumulated sample equals the number of kernel invocations (runtime / dt).
    """
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "data": vfiles},
        # nav_lon is a static 2D field on the same curvilinear grid; a single
        # data file suffices.
        "nav_lon": {
            "lon": mesh_mask,
            "lat": mesh_mask,
            "data": [
                ufiles[0],
            ],
        },
    }
    variables = {"U": "uo", "V": "vo", "nav_lon": "nav_lon"}
    dimensions = {
        "U": {"lon": "glamf", "lat": "gphif", "time": "time_counter"},
        "V": {"lon": "glamf", "lat": "gphif", "time": "time_counter"},
        "nav_lon": {"lon": "glamf", "lat": "gphif"},
    }
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=False
    )
    # Overwrite both sampled fields with ones so the expected sum is known.
    fieldset.nav_lon.data = np.ones(fieldset.nav_lon.data.shape, dtype=np.float32)
    fieldset.add_field(
        parcels.Field(
            "rectilinear_2D",
            np.ones((2, 2)),
            lon=np.array([-10, 20]),
            lat=np.array([40, 80]),
            chunksize=False,
        )
    )

    class MyParticle(ptype[mode]):
        # Accumulators for the two sampled fields (default 0).
        sample_var_curvilinear = parcels.Variable("sample_var_curvilinear")
        sample_var_rectilinear = parcels.Variable("sample_var_rectilinear")

    pset = parcels.ParticleSet(fieldset, pclass=MyParticle, lon=2.5, lat=52)

    def Sample2D(particle, fieldset, time):
        particle.sample_var_curvilinear += fieldset.nav_lon[
            time, particle.depth, particle.lat, particle.lon
        ]
        particle.sample_var_rectilinear += fieldset.rectilinear_2D[
            time, particle.depth, particle.lat, particle.lon
        ]

    runtime, dt = 86400 * 4, 6 * 3600
    pset.execute(pset.Kernel(parcels.AdvectionRK4) + Sample2D, runtime=runtime, dt=dt)

    # One sample per kernel invocation of an all-ones field.
    assert pset.sample_var_rectilinear == runtime / dt
    assert pset.sample_var_curvilinear == runtime / dt
656
657
@pytest.mark.parametrize("mode", ["jit"])
def test_diff_entry_chunksize_error_nemo_complex_conform_depth(mode):
    """Request conflicting chunk layouts for U/V/W on one shared NEMO grid.

    The actual chunk counts must NOT match the (mutually inconsistent)
    requests, since parcels has to reconcile them on the shared grid.
    """
    # ==== this test is expected to fall-back to a pre-defined minimal chunk as ==== #
    # ==== the requested chunks don't match, or throw a value error. ==== #
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    wfiles = sorted(glob(f"{data_folder}/ORCA*W.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": vfiles},
        "W": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": wfiles},
    }
    variables = {"U": "uo", "V": "vo", "W": "wo"}
    dimensions = {
        "U": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "V": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "W": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
    }
    # Deliberately inconsistent lat/lon chunk requests across the three fields.
    chs = {
        "U": {"depth": ("depthu", 75), "lat": ("y", 16), "lon": ("x", 16)},
        "V": {"depth": ("depthv", 75), "lat": ("y", 4), "lon": ("x", 16)},
        "W": {"depth": ("depthw", 75), "lat": ("y", 16), "lon": ("x", 4)},
    }
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chs
    )
    compute_nemo_particle_advection(fieldset, mode)
    # Nemo sample file dimensions: depthu=75, y=201, x=151
    # Actual per-dimension chunk counts of each field's grid.
    npart_U = 1
    npart_U = [npart_U * k for k in fieldset.U.nchunks[1:]]
    npart_V = 1
    npart_V = [npart_V * k for k in fieldset.V.nchunks[1:]]
    npart_W = 1
    # BUGFIX: this previously read fieldset.V.nchunks (copy-paste error);
    # W must be compared against its own chunk layout.
    npart_W = [npart_W * k for k in fieldset.W.nchunks[1:]]
    # Chunk counts that each request would imply if honoured verbatim.
    chn = {
        "U": {
            "lat": int(math.ceil(201.0 / chs["U"]["lat"][1])),
            "lon": int(math.ceil(151.0 / chs["U"]["lon"][1])),
            "depth": int(math.ceil(75.0 / chs["U"]["depth"][1])),
        },
        "V": {
            "lat": int(math.ceil(201.0 / chs["V"]["lat"][1])),
            "lon": int(math.ceil(151.0 / chs["V"]["lon"][1])),
            "depth": int(math.ceil(75.0 / chs["V"]["depth"][1])),
        },
        "W": {
            "lat": int(math.ceil(201.0 / chs["W"]["lat"][1])),
            "lon": int(math.ceil(151.0 / chs["W"]["lon"][1])),
            "depth": int(math.ceil(75.0 / chs["W"]["depth"][1])),
        },
    }
    npart_U_request = 1
    npart_U_request = [npart_U_request * chn["U"][k] for k in chn["U"]]
    npart_V_request = 1
    npart_V_request = [npart_V_request * chn["V"][k] for k in chn["V"]]
    npart_W_request = 1
    npart_W_request = [npart_W_request * chn["W"][k] for k in chn["W"]]
    assert npart_U != npart_U_request
    assert npart_V != npart_V_request
    assert npart_W != npart_W_request
736
737
@pytest.mark.parametrize("mode", ["jit"])
def test_diff_entry_chunksize_correction_globcurrent(mode):
    """With mismatched U/V chunk requests, V keeps its layout and U is corrected to match it."""
    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    filenames = str(
        data_folder / "200201*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc"
    )
    variables = {
        "U": "eastward_eulerian_current_velocity",
        "V": "northward_eulerian_current_velocity",
    }
    dimensions = {"lat": "lat", "lon": "lon", "time": "time"}
    # U and V request different lon chunk sizes on the same grid.
    chs = {
        "U": {"lat": ("lat", 16), "lon": ("lon", 16)},
        "V": {"lat": ("lat", 16), "lon": ("lon", 4)},
    }
    fieldset = parcels.FieldSet.from_netcdf(
        filenames, variables, dimensions, chunksize=chs
    )
    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
    )
    # GlobCurrent sample file dimensions: time=UNLIMITED, lat=41, lon=81
    # Actual per-dimension chunk counts of each field.
    npart_U = list(fieldset.U.nchunks[1:])
    npart_V = list(fieldset.V.nchunks[1:])
    # Chunk counts implied by each request, per dimension.
    chn = {
        name: {
            "lat": int(math.ceil(41.0 / chs[name]["lat"][1])),
            "lon": int(math.ceil(81.0 / chs[name]["lon"][1])),
        }
        for name in ("U", "V")
    }
    npart_V_request = [chn["V"][dim] for dim in chn["V"]]
    assert npart_V_request != npart_U
    assert npart_V_request == npart_V
example_decaying_moving_eddy.py#
1from datetime import timedelta
2
3import numpy as np
4import parcels
5import pytest
6
7ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
8
# Define some constants for the Fabbroni (2009) decaying moving eddy test case.
u_g = 0.04  # Geostrophic current
u_0 = 0.3  # Initial speed in x direction. v_0 = 0
gamma = (
    1.0 / timedelta(days=2.89).total_seconds()
)  # Dissipative effects due to viscosity.
gamma_g = 1.0 / timedelta(days=28.9).total_seconds()  # Decay rate of the geostrophic current.
f = 1.0e-4  # Coriolis parameter.
start_lon = [10000.0]  # Define the start longitude and latitude for the particle.
start_lat = [10000.0]
19
20
def decaying_moving_eddy_fieldset(xdim=2, ydim=2):
    """Build a small 2D flat fieldset with a spatially uniform, decaying flow.

    Simulates an ocean that accelerates subject to Coriolis force and
    dissipative effects, upon which a geostrophic current is superimposed.

    The original test description can be found in: N. Fabbroni, 2009,
    Numerical Simulation of Passive tracers dispersion in the sea,
    Ph.D. dissertation, University of Bologna
    http://amsdottorato.unibo.it/1733/1/Fabbroni_Nicoletta_Tesi.pdf
    """
    # Grid: 2 days of 5-minute steps, a single depth level, a coarse lon/lat box.
    depth = np.zeros(1, dtype=np.float32)
    time = np.arange(0.0, 2.0 * 86400.0 + 1e-5, 60.0 * 5.0, dtype=np.float64)
    lon = np.linspace(0, 20000, xdim, dtype=np.float32)
    lat = np.linspace(5000, 12000, ydim, dtype=np.float32)

    shape = (time.size, lat.size, lon.size)
    U = np.zeros(shape, dtype=np.float32)
    V = np.zeros(shape, dtype=np.float32)

    # The flow is uniform in space: fill each time slice with a single value.
    for ti, tval in enumerate(time):
        U[ti, :, :] = u_g * np.exp(-gamma_g * tval) + (u_0 - u_g) * np.exp(
            -gamma * tval
        ) * np.cos(f * tval)
        V[ti, :, :] = -(u_0 - u_g) * np.exp(-gamma * tval) * np.sin(f * tval)

    data = {"U": U, "V": V}
    dimensions = {"lon": lon, "lat": lat, "depth": depth, "time": time}
    return parcels.FieldSet.from_data(data, dimensions, mesh="flat")
50
51
def true_values(t, x_0, y_0):
    """Return the analytical particle position ``[x, y]`` at time ``t``.

    Evaluates the Fabbroni (2009) closed-form solution for a particle
    released at (x_0, y_0) in the decaying moving eddy flow.
    """
    decay = np.exp(-gamma * t)  # viscous decay factor
    amp = (u_0 - u_g) / (f**2 + gamma**2)  # inertial-oscillation amplitude

    x = (
        x_0
        + (u_g / gamma_g) * (1 - np.exp(-gamma_g * t))
        + f
        * amp
        * (
            (gamma / f)
            + decay * (np.sin(f * t) - (gamma / f) * np.cos(f * t))
        )
    )
    y = y_0 - amp * f * (
        1 - decay * (np.cos(f * t) + (gamma / f) * np.sin(f * t))
    )

    return np.array([x, y])
70
71
def decaying_moving_example(
    fieldset, outfile, mode="scipy", method=parcels.AdvectionRK4
):
    """Advect one particle for two days in the decaying-eddy flow.

    Output is written hourly to ``outfile``; the final ParticleSet is returned.
    """
    pset = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=start_lon, lat=start_lat
    )

    output = pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1))
    pset.execute(
        method,
        runtime=timedelta(days=2),
        dt=timedelta(minutes=5),
        output_file=output,
    )

    return pset
91
92
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_rotation_example(mode, tmpdir):
    """Advected end position must match the analytical Fabbroni solution.

    NOTE(review): the name says "rotation" but this exercises the decaying
    moving eddy; kept for backward compatibility with test selection.
    """
    outfile = tmpdir.join("DecayingMovingParticle.zarr")
    fieldset = decaying_moving_eddy_fieldset()
    pset = decaying_moving_example(fieldset, outfile, mode=mode)
    # Compare simulated end position against the closed-form solution.
    expected = true_values(pset[0].time, start_lon, start_lat)
    simulated = np.array([[pset[0].lon], [pset[0].lat]])
    assert np.allclose(simulated, expected, 1e-2)
104
105
def main():
    """Write the example fieldset to disk and run the advection example."""
    fieldset = decaying_moving_eddy_fieldset()
    fieldset.write("decaying_moving_eddy")
    decaying_moving_example(fieldset, "DecayingMovingParticle.zarr")
113
114
# Allow running this example as a stand-alone script.
if __name__ == "__main__":
    main()
example_globcurrent.py#
1from datetime import timedelta
2from glob import glob
3
4import numpy as np
5import parcels
6import pytest
7import xarray as xr
8
# Map execution-mode name to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
10
11
def set_globcurrent_fieldset(
    filename=None,
    indices=None,
    deferred_load=True,
    use_xarray=False,
    time_periodic=False,
    timestamps=None,
):
    """Build a FieldSet from the GlobCurrent example data.

    Defaults to the 2002 files of the downloadable example dataset. Loads
    via xarray when *use_xarray* is set, otherwise via FieldSet.from_netcdf.
    """
    if filename is None:
        data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
        filename = str(
            data_folder / "2002*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc"
        )
    variables = {
        "U": "eastward_eulerian_current_velocity",
        "V": "northward_eulerian_current_velocity",
    }
    # When explicit timestamps are supplied, time is not read from the files.
    dimensions = {"lat": "lat", "lon": "lon"}
    if timestamps is None:
        dimensions["time"] = "time"
    if use_xarray:
        ds = xr.open_mfdataset(filename, combine="by_coords")
        return parcels.FieldSet.from_xarray_dataset(
            ds, variables, dimensions, time_periodic=time_periodic
        )
    return parcels.FieldSet.from_netcdf(
        filename,
        variables,
        dimensions,
        indices,
        deferred_load=deferred_load,
        time_periodic=time_periodic,
        timestamps=timestamps,
    )
48
49
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_fieldset(use_xarray):
    """Check grid sizes, and index-based subsetting on the netcdf path."""
    fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)
    for field in (fieldset.U, fieldset.V):
        assert field.lon.size == 81
        assert field.lat.size == 41

    if not use_xarray:
        indices = {"lon": [5], "lat": range(20, 30)}
        subset = set_globcurrent_fieldset(indices=indices, use_xarray=use_xarray)
        for full, sub in ((fieldset.U, subset.U), (fieldset.V, subset.V)):
            assert np.allclose(sub.lon, full.lon[indices["lon"]])
            assert np.allclose(sub.lat, full.lat[indices["lat"]])
65
66
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize(
    "dt, lonstart, latstart", [(3600.0, 25, -35), (-3600.0, 20, -39)]
)
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_fieldset_advancetime(mode, dt, lonstart, latstart, use_xarray):
    """Deferred loading must give the same trajectory as loading all data up front."""
    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    basepath = str(data_folder / "20*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc")
    files = sorted(glob(str(basepath)))

    # Fieldset with the default deferred loading of the first ten files.
    fieldsetsub = set_globcurrent_fieldset(files[0:10], use_xarray=use_xarray)
    psetsub = parcels.ParticleSet.from_list(
        fieldset=fieldsetsub, pclass=ptype[mode], lon=[lonstart], lat=[latstart]
    )

    # Same ten files, but with deferred loading disabled.
    fieldsetall = set_globcurrent_fieldset(
        files[0:10], deferred_load=False, use_xarray=use_xarray
    )
    psetall = parcels.ParticleSet.from_list(
        fieldset=fieldsetall, pclass=ptype[mode], lon=[lonstart], lat=[latstart]
    )
    if dt < 0:
        # Backward integration starts from the last available time slice.
        psetsub[0].time_nextloop = fieldsetsub.U.grid.time[-1]
        psetall[0].time_nextloop = fieldsetall.U.grid.time[-1]

    psetsub.execute(parcels.AdvectionRK4, runtime=timedelta(days=7), dt=dt)
    psetall.execute(parcels.AdvectionRK4, runtime=timedelta(days=7), dt=dt)

    # Both loading strategies must land the particle in (nearly) the same place.
    assert abs(psetsub[0].lon - psetall[0].lon) < 1e-4
96
97
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_particles(mode, use_xarray):
    """One day of RK4 advection should end near (23.8, -35.3)."""
    fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)
    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=[25], lat=[-35])

    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
    )

    assert abs(pset[0].lon - 23.8) < 1
    assert abs(pset[0].lat - -35.3) < 1
114
115
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("rundays", [300, 900])
def test_globcurrent_time_periodic(mode, rundays):
    """Sampled velocity must not depend on deferred vs eager loading of a time-periodic field."""
    sample_var = []
    for deferred_load in [True, False]:
        # Field repeats with a 365-day period.
        fieldset = set_globcurrent_fieldset(
            time_periodic=timedelta(days=365), deferred_load=deferred_load
        )

        MyParticle = ptype[mode].add_variable("sample_var", initial=0.0)

        pset = parcels.ParticleSet(
            fieldset, pclass=MyParticle, lon=25, lat=-35, time=fieldset.U.grid.time[0]
        )

        # Kernel: accumulate the local zonal velocity into sample_var.
        def SampleU(particle, fieldset, time):
            u, v = fieldset.UV[time, particle.depth, particle.lat, particle.lon]
            particle.sample_var += u

        pset.execute(SampleU, runtime=timedelta(days=rundays), dt=timedelta(days=1))
        sample_var.append(pset[0].sample_var)

    # Accumulated samples of both runs must agree.
    assert np.allclose(sample_var[0], sample_var[1])
139
140
@pytest.mark.parametrize("dt", [-300, 300])
def test_globcurrent_xarray_vs_netcdf(dt):
    """xarray-based and netcdf-based fieldsets must yield identical trajectories."""
    lonstart, latstart, runtime = 25, -35, timedelta(days=7)

    endpoints = []
    for use_xarray in (False, True):
        fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)
        pset = parcels.ParticleSet(
            fieldset, pclass=parcels.JITParticle, lon=lonstart, lat=latstart
        )
        pset.execute(parcels.AdvectionRK4, runtime=runtime, dt=dt)
        endpoints.append((pset[0].lon, pset[0].lat))

    (lonN, latN), (lonX, latX) = endpoints
    assert np.allclose(lonN, lonX)
    assert np.allclose(latN, latX)
159
160
@pytest.mark.parametrize("dt", [-300, 300])
def test_globcurrent_netcdf_timestamps(dt):
    """Passing explicit timestamps must reproduce the time-from-file trajectory."""
    fieldsetNetcdf = set_globcurrent_fieldset()
    # Reuse the time slices read from file as explicit timestamps.
    fieldsetTimestamps = set_globcurrent_fieldset(
        timestamps=fieldsetNetcdf.U.grid.timeslices
    )
    lonstart, latstart, runtime = 25, -35, timedelta(days=7)

    psetN = parcels.ParticleSet(
        fieldsetNetcdf, pclass=parcels.JITParticle, lon=lonstart, lat=latstart
    )
    psetN.execute(parcels.AdvectionRK4, runtime=runtime, dt=dt)

    psetT = parcels.ParticleSet(
        fieldsetTimestamps, pclass=parcels.JITParticle, lon=lonstart, lat=latstart
    )
    psetT.execute(parcels.AdvectionRK4, runtime=runtime, dt=dt)

    assert np.allclose(psetN.lon[0], psetT.lon[0])
    assert np.allclose(psetN.lat[0], psetT.lat[0])
180
181
def test__particles_init_time():
    """All supported ways of specifying particle start time must agree."""
    fieldset = set_globcurrent_fieldset()

    lonstart = [25]
    latstart = [-35]

    # Equivalent representations of 2002-01-15 (14 days into the dataset):
    # a datetime64 scalar, seconds since the field start, an array, and a list.
    time_specs = [
        np.datetime64("2002-01-15"),
        14 * 86400,
        np.array([np.datetime64("2002-01-15")]),
        [np.datetime64("2002-01-15")],
    ]
    psets = [
        parcels.ParticleSet(
            fieldset,
            pclass=parcels.JITParticle,
            lon=lonstart,
            lat=latstart,
            time=spec,
        )
        for spec in time_specs
    ]

    reference = psets[0][0].time
    for other in psets[1:]:
        assert reference - other[0].time == 0
220
221
@pytest.mark.xfail(reason="Time extrapolation error expected to be thrown", strict=True)
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_time_extrapolation_error(mode, use_xarray):
    """Starting before the first available time slice must raise during execute."""
    fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)

    # One day before the earliest field time.
    starttime = fieldset.U.time[0] - timedelta(days=1).total_seconds()
    pset = parcels.ParticleSet(
        fieldset,
        pclass=ptype[mode],
        lon=[25],
        lat=[-35],
        time=starttime,
    )

    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
    )
239
240
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("dt", [-300, 300])
@pytest.mark.parametrize("with_starttime", [True, False])
def test_globcurrent_startparticles_between_time_arrays(mode, dt, with_starttime):
    """Sampling a field with a shorter time range must only fail when the
    particle start time lies at the edge of the full field's time array."""
    fieldset = set_globcurrent_fieldset()

    # Add a field P that covers only February 2002.
    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    fnamesFeb = sorted(glob(f"{data_folder}/200202*.nc"))
    fieldset.add_field(
        parcels.Field.from_netcdf(
            fnamesFeb,
            ("P", "eastward_eulerian_current_velocity"),
            {"lat": "lat", "lon": "lon", "time": "time"},
        )
    )

    MyParticle = ptype[mode].add_variable("sample_var", initial=0.0)

    # Kernel: accumulate the sampled P value.
    def SampleP(particle, fieldset, time):
        particle.sample_var += fieldset.P[
            time, particle.depth, particle.lat, particle.lon
        ]

    if with_starttime:
        starttime = fieldset.U.grid.time[0] if dt > 0 else fieldset.U.grid.time[-1]
        pset = parcels.ParticleSet(
            fieldset, pclass=MyParticle, lon=[25], lat=[-35], time=starttime
        )
    else:
        pset = parcels.ParticleSet(fieldset, pclass=MyParticle, lon=[25], lat=[-35])

    kernel = pset.Kernel(parcels.AdvectionRK4) + SampleP
    if with_starttime:
        with pytest.raises(parcels.TimeExtrapolationError):
            pset.execute(kernel, runtime=timedelta(days=1), dt=dt)
    else:
        pset.execute(kernel, runtime=timedelta(days=1), dt=dt)
285
286
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_globcurrent_particle_independence(mode, rundays=5):
    """Deleting one particle must not perturb the trajectory of the other."""
    fieldset = set_globcurrent_fieldset()
    time0 = fieldset.U.grid.time[0]

    # Kernel that removes the particle with id 0.
    def DeleteP0(particle, fieldset, time):
        if particle.id == 0:
            particle.delete()

    start_lons = [25, 25]
    start_lats = [-35, -35]

    pset0 = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=start_lons, lat=start_lats, time=time0
    )
    pset0.execute(
        pset0.Kernel(DeleteP0) + parcels.AdvectionRK4,
        runtime=timedelta(days=rundays),
        dt=timedelta(minutes=5),
    )

    pset1 = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=start_lons, lat=start_lats, time=time0
    )
    pset1.execute(
        parcels.AdvectionRK4,
        runtime=timedelta(days=rundays),
        dt=timedelta(minutes=5),
    )

    # The surviving particle of pset0 must match the corresponding one of pset1.
    assert np.allclose([pset0[-1].lon, pset0[-1].lat], [pset1[-1].lon, pset1[-1].lat])
315
316
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("dt", [-300, 300])
@pytest.mark.parametrize("pid_offset", [0, 20])
def test_globcurrent_pset_fromfile(mode, dt, pid_offset, tmpdir):
    """A ParticleSet restarted from a particle file must continue identically."""
    filename = tmpdir.join("pset_fromparticlefile.zarr")
    fieldset = set_globcurrent_fieldset()

    # Offset the id counter so ids in the file do not necessarily start at 0.
    ptype[mode].setLastID(pid_offset)
    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
    pfile = pset.ParticleFile(filename, outputdt=timedelta(hours=6))
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=dt, output_file=pfile
    )
    # Ensure the final positions are also present in the output file.
    pfile.write_latest_locations(pset, max(pset.time_nextloop))

    # Restart from the latest written time when running forward,
    # or from the earliest when running backward.
    restarttime = np.nanmax if dt > 0 else np.nanmin
    pset_new = parcels.ParticleSet.from_particlefile(
        fieldset, pclass=ptype[mode], filename=filename, restarttime=restarttime
    )
    # Advance the original and the restarted set by another day each ...
    pset.execute(parcels.AdvectionRK4, runtime=timedelta(days=1), dt=dt)
    pset_new.execute(parcels.AdvectionRK4, runtime=timedelta(days=1), dt=dt)

    # ... and check that they stayed in sync for all tracked variables.
    for var in ["lon", "lat", "depth", "time", "id"]:
        assert np.allclose(
            [getattr(p, var) for p in pset], [getattr(p, var) for p in pset_new]
        )
343
344
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_error_outputdt_not_multiple_dt(mode, tmpdir):
    """Executing with an outputdt that is not a multiple of dt must raise."""
    fieldset = set_globcurrent_fieldset()
    filepath = tmpdir.join("pfile_error_outputdt_not_multiple_dt.zarr")

    dt = 81.2584344538292  # number for which output writing fails

    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=[0], lat=[0])
    outfile = pset.ParticleFile(name=filepath, outputdt=timedelta(days=1))

    def DoNothing(particle, fieldset, time):
        pass

    with pytest.raises(ValueError):
        pset.execute(
            DoNothing, runtime=timedelta(days=10), dt=dt, output_file=outfile
        )
example_mitgcm.py#
1from datetime import timedelta
2
3import numpy as np
4import parcels
5import xarray as xr
6
# Map execution-mode name to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
8
9
def run_mitgcm_zonally_reentrant(mode):
    """Function that shows how to load MITgcm data in a zonally periodic domain.

    Parameters
    ----------
    mode : str
        Execution mode, "scipy" or "jit" (a key of ``ptype``).
    """
    data_folder = parcels.download_example_dataset("MITgcm_example_data")
    filenames = {
        "U": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
        "V": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
    }
    variables = {"U": "UVEL", "V": "VVEL"}
    # Both velocity components are read on the XG/YG coordinates
    # (presumably the C-grid corner points — confirm against the dataset).
    dimensions = {
        "U": {"lon": "XG", "lat": "YG", "time": "time"},
        "V": {"lon": "XG", "lat": "YG", "time": "time"},
    }
    fieldset = parcels.FieldSet.from_mitgcm(
        filenames, variables, dimensions, mesh="flat"
    )

    # Halo needed so interpolation works across the periodic zonal boundary.
    fieldset.add_periodic_halo(zonal=True)
    fieldset.add_constant("domain_width", 1000000)

    # Kernel: wrap particle longitudes back into [0, domain_width].
    def periodicBC(particle, fieldset, time):
        if particle.lon < 0:
            particle_dlon += fieldset.domain_width  # noqa
        elif particle.lon > fieldset.domain_width:
            particle_dlon -= fieldset.domain_width

    # Release particles 5 cells away from the Eastern boundary
    pset = parcels.ParticleSet.from_line(
        fieldset,
        pclass=ptype[mode],
        start=(fieldset.U.grid.lon[-5], fieldset.U.grid.lat[5]),
        finish=(fieldset.U.grid.lon[-5], fieldset.U.grid.lat[-5]),
        size=10,
    )
    pfile = parcels.ParticleFile(
        "MIT_particles_" + str(mode) + ".zarr",
        pset,
        outputdt=timedelta(days=1),
        chunks=(len(pset), 1),
    )
    kernels = parcels.AdvectionRK4 + pset.Kernel(periodicBC)
    pset.execute(
        kernels, runtime=timedelta(days=5), dt=timedelta(minutes=30), output_file=pfile
    )
53
54
def test_mitgcm_output_compare():
    """Scipy and JIT runs of the MITgcm example must produce identical tracks."""
    for mode in ("scipy", "jit"):
        run_mitgcm_zonally_reentrant(mode)

    ds_jit = xr.open_zarr("MIT_particles_jit.zarr")
    ds_scipy = xr.open_zarr("MIT_particles_scipy.zarr")

    np.testing.assert_allclose(ds_jit.lat.data, ds_scipy.lat.data)
    np.testing.assert_allclose(ds_jit.lon.data, ds_scipy.lon.data)
example_moving_eddies.py#
1import gc
2import math
3from argparse import ArgumentParser
4from datetime import timedelta
5
6import numpy as np
7import parcels
8import pytest
9
# Map execution-mode name to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map advection-scheme name to the corresponding Parcels kernel.
method = {
    "RK4": parcels.AdvectionRK4,
    "EE": parcels.AdvectionEE,
    "RK45": parcels.AdvectionRK45,
}
16
17
def moving_eddies_fieldset(xdim=200, ydim=350, mesh="flat"):
    """Generate a fieldset encapsulating the flow field consisting of two
    moving eddies, one moving westward and the other moving northwestward.

    Parameters
    ----------
    xdim :
        Horizontal dimension of the generated fieldset (Default value = 200)
    ydim :
        Vertical dimension of the generated fieldset (Default value = 350)
    mesh : str
        String indicating the type of mesh coordinates and
        units used during velocity interpolation:

        1. spherical: Lat and lon in degree, with a
           correction for zonal velocity U near the poles.
        2. flat (default): No conversion, lat/lon are assumed to be in m.

    Notes
    -----
    Note that this is not a proper geophysical flow. Rather, a Gaussian eddy is moved
    artificially with uniform velocities. Velocities are calculated from geostrophy.

    """
    # Set Parcels FieldSet variables
    time = np.arange(0.0, 8.0 * 86400.0, 86400.0, dtype=np.float64)

    # Coordinates of the test fieldset (on A-grid in m)
    if mesh == "spherical":
        lon = np.linspace(0, 4, xdim, dtype=np.float32)
        lat = np.linspace(45, 52, ydim, dtype=np.float32)
    else:
        lon = np.linspace(0, 4.0e5, xdim, dtype=np.float32)
        lat = np.linspace(0, 7.0e5, ydim, dtype=np.float32)

    # Grid spacing in m
    def cosd(x):
        return math.cos(math.radians(float(x)))

    dx = (
        (lon[1] - lon[0]) * 1852 * 60 * cosd(lat.mean())
        if mesh == "spherical"
        else lon[1] - lon[0]
    )
    dy = (lat[1] - lat[0]) * 1852 * 60 if mesh == "spherical" else lat[1] - lat[0]

    # Define arrays U (zonal), V (meridional), and P (sea surface height) on A-grid
    U = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)
    V = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)
    P = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)

    # Some constants
    corio_0 = 1.0e-4  # Coriolis parameter
    h0 = 1  # Max eddy height
    sig = 0.5  # Eddy e-folding decay scale (in degrees)
    g = 10  # Gravitational constant
    eddyspeed = 0.1  # Translational speed in m/s
    dX = eddyspeed * 86400 / dx  # Grid cell movement of eddy max each day
    dY = eddyspeed * 86400 / dy  # Grid cell movement of eddy max each day

    [x, y] = np.mgrid[: lon.size, : lat.size]
    for t in range(time.size):
        # Eddy centres in grid-cell units: eddy 1 drifts westward,
        # eddy 2 drifts northwestward.
        hymax_1 = lat.size / 7.0
        hxmax_1 = 0.75 * lon.size - dX * t
        hymax_2 = 3.0 * lat.size / 7.0 + dY * t
        hxmax_2 = 0.75 * lon.size - dX * t

        # Superpose two Gaussian sea-surface-height anomalies.
        P[:, :, t] = h0 * np.exp(
            -((x - hxmax_1) ** 2) / (sig * lon.size / 4.0) ** 2
            - (y - hymax_1) ** 2 / (sig * lat.size / 7.0) ** 2
        )
        P[:, :, t] += h0 * np.exp(
            -((x - hxmax_2) ** 2) / (sig * lon.size / 4.0) ** 2
            - (y - hymax_2) ** 2 / (sig * lat.size / 7.0) ** 2
        )

        # Geostrophic velocities from the height gradient.
        V[:-1, :, t] = -np.diff(P[:, :, t], axis=0) / dx / corio_0 * g
        V[-1, :, t] = V[-2, :, t]  # Fill in the last column

        U[:, :-1, t] = np.diff(P[:, :, t], axis=1) / dy / corio_0 * g
        U[:, -1, t] = U[:, -2, t]  # Fill in the last row

    data = {"U": U, "V": V, "P": P}
    dimensions = {"lon": lon, "lat": lat, "time": time}

    fieldset = parcels.FieldSet.from_data(data, dimensions, transpose=True, mesh=mesh)

    # setting some constants for AdvectionRK45 kernel
    fieldset.RK45_min_dt = 1e-3
    fieldset.RK45_max_dt = 1e2
    fieldset.RK45_tol = 1e-5
    return fieldset
113
114
def moving_eddies_example(
    fieldset, outfile, npart=2, mode="jit", verbose=False, method=parcels.AdvectionRK4
):
    """Configuration of a particle set that follows two moving eddies.


    Parameters
    ----------
    fieldset :
        :class FieldSet: that defines the flow field
    outfile :
        Path (stem) for the particle output file.
    npart :
        Number of particles to initialise. (Default value = 2)
    mode :
        (Default value = 'jit')
    verbose :
        (Default value = False)
    method :
        (Default value = AdvectionRK4)
    """
    # Release positions: degrees on a spherical mesh, metres on a flat one.
    start = (3.3, 46.0) if fieldset.U.grid.mesh == "spherical" else (3.3e5, 1e5)
    finish = (3.3, 47.8) if fieldset.U.grid.mesh == "spherical" else (3.3e5, 2.8e5)
    pset = parcels.ParticleSet.from_line(
        fieldset=fieldset, size=npart, pclass=ptype[mode], start=start, finish=finish
    )

    if verbose:
        print(f"Initial particle positions:\n{pset}")

    # Execute for 1 week, with 1 hour timesteps and hourly output
    runtime = timedelta(days=7)
    print("MovingEddies: Advecting %d particles for %s" % (npart, str(runtime)))
    pset.execute(
        method,
        runtime=runtime,
        dt=timedelta(hours=1),
        output_file=pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1)),
    )

    if verbose:
        print(f"Final particle positions:\n{pset}")

    return pset
160
161
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_moving_eddies_fwdbwd(mode, mesh, tmpdir, npart=2):
    """Advecting forward and then backward must return particles to their start."""
    method = parcels.AdvectionRK4
    fieldset = moving_eddies_fieldset(mesh=mesh)

    # Start positions: degrees on a spherical mesh, metres on a flat one.
    lons = [3.3, 3.3] if fieldset.U.grid.mesh == "spherical" else [3.3e5, 3.3e5]
    lats = [46.0, 47.8] if fieldset.U.grid.mesh == "spherical" else [1e5, 2.8e5]
    pset = parcels.ParticleSet(
        fieldset=fieldset, pclass=ptype[mode], lon=lons, lat=lats
    )

    # Execute forward for 1 day, with 5-minute timesteps and hourly output
    runtime = timedelta(days=1)
    dt = timedelta(minutes=5)
    outputdt = timedelta(hours=1)
    print("MovingEddies: Advecting %d particles for %s" % (npart, str(runtime)))
    outfile = tmpdir.join("EddyParticlefwd")
    pset.execute(
        method,
        runtime=runtime,
        dt=dt,
        output_file=pset.ParticleFile(name=outfile, outputdt=outputdt),
    )

    # Then integrate the same particles backward to time 0.
    print("Now running in backward time mode")
    outfile = tmpdir.join("EddyParticlebwd")
    pset.execute(
        method,
        endtime=0,
        dt=-dt,
        output_file=pset.ParticleFile(name=outfile, outputdt=outputdt),
    )

    # Also include last timestep
    for var in ["lon", "lat", "depth", "time"]:
        pset.particledata.setallvardata(
            f"{var}", pset.particledata.getvardata(f"{var}_nextloop")
        )

    # Particles must end up (close to) where they started.
    assert np.allclose(pset.lon, lons)
    assert np.allclose(pset.lat, lats)
205
206
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_moving_eddies_fieldset(mode, mesh, tmpdir):
    """Particles advected in the analytic eddy field end up at known positions."""
    fieldset = moving_eddies_fieldset(mesh=mesh)
    pset = moving_eddies_example(fieldset, tmpdir.join("EddyParticle"), 2, mode=mode)

    # Roll the last computed positions into lon/lat/depth/time.
    for var in ["lon", "lat", "depth", "time"]:
        pset.particledata.setallvardata(
            f"{var}", pset.particledata.getvardata(f"{var}_nextloop")
        )

    if mesh == "flat":
        assert pset[0].lon < 2.2e5 and 1.1e5 < pset[0].lat < 1.2e5
        assert pset[1].lon < 2.2e5 and 3.7e5 < pset[1].lat < 3.8e5
    else:
        assert pset[0].lon < 2.0 and 46.2 < pset[0].lat < 46.25
        assert pset[1].lon < 2.0 and 48.8 < pset[1].lat < 48.85
224
225
def fieldsetfile(mesh, tmpdir):
    """Write a 200x350 moving-eddies fieldset to disk and return its file stem."""
    path = tmpdir.join("moving_eddies")
    moving_eddies_fieldset(200, 350, mesh=mesh).write(path)
    return path
232
233
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_moving_eddies_file(mode, mesh, tmpdir):
    """Same expectations as the fieldset test, but reading the fieldset from disk."""
    gc.collect()
    fieldset = parcels.FieldSet.from_parcels(
        fieldsetfile(mesh, tmpdir), extra_fields={"P": "P"}
    )
    pset = moving_eddies_example(fieldset, tmpdir.join("EddyParticle"), 2, mode=mode)

    # Roll the last computed positions into lon/lat/depth/time.
    for var in ["lon", "lat", "depth", "time"]:
        pset.particledata.setallvardata(
            f"{var}", pset.particledata.getvardata(f"{var}_nextloop")
        )

    if mesh == "flat":
        assert pset[0].lon < 2.2e5 and 1.1e5 < pset[0].lat < 1.2e5
        assert pset[1].lon < 2.2e5 and 3.7e5 < pset[1].lat < 3.8e5
    else:
        assert pset[0].lon < 2.0 and 46.2 < pset[0].lat < 46.25
        assert pset[1].lon < 2.0 and 48.8 < pset[1].lat < 48.85
254
255
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_periodic_and_computeTimeChunk_eddies(mode):
    """Run the moving-eddies flow with doubly-periodic halos and extra kernels."""
    data_folder = parcels.download_example_dataset("MovingEddies_data")
    filename = str(data_folder / "moving_eddies")

    fieldset = parcels.FieldSet.from_parcels(filename)
    # Record the original domain edges before the halo extends the grid.
    fieldset.add_constant("halo_west", fieldset.U.grid.lon[0])
    fieldset.add_constant("halo_east", fieldset.U.grid.lon[-1])
    fieldset.add_constant("halo_south", fieldset.U.grid.lat[0])
    fieldset.add_constant("halo_north", fieldset.U.grid.lat[-1])
    fieldset.add_periodic_halo(zonal=True, meridional=True)
    pset = parcels.ParticleSet.from_list(
        fieldset=fieldset, pclass=ptype[mode], lon=[3.3, 3.3], lat=[46.0, 47.8]
    )

    # Kernel: wrap particles around the periodic domain edges.
    def periodicBC(particle, fieldset, time):
        if particle.lon < fieldset.halo_west:
            particle_dlon += fieldset.halo_east - fieldset.halo_west  # noqa
        elif particle.lon > fieldset.halo_east:
            particle_dlon -= fieldset.halo_east - fieldset.halo_west
        if particle.lat < fieldset.halo_south:
            particle_dlat += fieldset.halo_north - fieldset.halo_south  # noqa
        elif particle.lat > fieldset.halo_north:
            particle_dlat -= fieldset.halo_north - fieldset.halo_south

    # Kernel: add a constant southwestward drift on top of the advection.
    def slowlySouthWestward(particle, fieldset, time):
        particle_dlon -= 5 * particle.dt / 1e5  # noqa
        particle_dlat -= 3 * particle.dt / 1e5  # noqa

    kernels = pset.Kernel(parcels.AdvectionRK4) + slowlySouthWestward + periodicBC
    pset.execute(kernels, runtime=timedelta(days=6), dt=timedelta(hours=1))
287
288
def main(args=None):
    """Parse command-line options and run the moving-eddies example."""
    p = ArgumentParser(
        description="""
Example of particle advection around an idealised peninsula"""
    )
    p.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing RK4 computation",
    )
    p.add_argument(
        "-p", "--particles", type=int, default=2, help="Number of particles to advect"
    )
    p.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        help="Print particle information before and after execution",
    )
    p.add_argument(
        "--profiling",
        action="store_true",
        default=False,
        help="Print profiling information after run",
    )
    p.add_argument(
        "-f",
        "--fieldset",
        type=int,
        nargs=2,
        default=None,
        help="Generate fieldset file with given dimensions",
    )
    p.add_argument(
        "-m",
        "--method",
        choices=("RK4", "EE", "RK45"),
        default="RK4",
        help="Numerical method used for advection",
    )
    args = p.parse_args(args)
    data_folder = parcels.download_example_dataset("MovingEddies_data")
    # NOTE(review): filename is not used below; kept so the example dataset
    # is still downloaded — confirm whether this is intentional.
    filename = str(data_folder / "moving_eddies")

    # Generate fieldset files according to given dimensions
    if args.fieldset is not None:
        fieldset = moving_eddies_fieldset(
            args.fieldset[0], args.fieldset[1], mesh="flat"
        )
    else:
        fieldset = moving_eddies_fieldset(mesh="flat")
    outfile = "EddyParticle"

    if args.profiling:
        from cProfile import runctx
        from pstats import Stats

        # The statement string is evaluated by cProfile against the local
        # variables defined above.
        runctx(
            "moving_eddies_example(fieldset, outfile, args.particles, mode=args.mode, \
                              verbose=args.verbose, method=method[args.method])",
            globals(),
            locals(),
            "Profile.prof",
        )
        Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
    else:
        moving_eddies_example(
            fieldset,
            outfile,
            args.particles,
            mode=args.mode,
            verbose=args.verbose,
            method=method[args.method],
        )
367
# Allow running this example as a stand-alone script.
if __name__ == "__main__":
    main()
example_nemo_curvilinear.py#
1"""Example script that runs a set of particles in a NEMO curvilinear grid."""
2
3from argparse import ArgumentParser
4from datetime import timedelta
5from glob import glob
6
7import numpy as np
8import parcels
9import pytest
10
# Map execution-mode name to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map advection-scheme name ("RK4" or analytical "AA") to its kernel.
advection = {"RK4": parcels.AdvectionRK4, "AA": parcels.AdvectionAnalytical}
13
14
def run_nemo_curvilinear(mode, outfile, advtype="RK4"):
    """Run parcels on the NEMO curvilinear grid.

    Parameters
    ----------
    mode : str
        Execution mode, "scipy" or "jit" (a key of ``ptype``).
    outfile :
        Path for the particle output file.
    advtype : str
        Advection scheme, "RK4" or "AA" (a key of ``advection``).
    """
    data_folder = parcels.download_example_dataset("NemoCurvilinear_data")

    filenames = {
        "U": {
            "lon": f"{data_folder}/mesh_mask.nc4",
            "lat": f"{data_folder}/mesh_mask.nc4",
            "data": f"{data_folder}/U_purely_zonal-ORCA025_grid_U.nc4",
        },
        "V": {
            "lon": f"{data_folder}/mesh_mask.nc4",
            "lat": f"{data_folder}/mesh_mask.nc4",
            "data": f"{data_folder}/V_purely_zonal-ORCA025_grid_V.nc4",
        },
    }
    variables = {"U": "U", "V": "V"}
    dimensions = {"lon": "glamf", "lat": "gphif"}
    chunksize = {"lat": ("y", 256), "lon": ("x", 512)}
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chunksize
    )
    assert fieldset.U.chunksize == chunksize

    # Now run particles as normal
    npart = 20
    lonp = 30 * np.ones(npart)
    # Analytical advection ("AA") uses a smaller latitude range and a
    # shorter runtime than RK4.
    if advtype == "RK4":
        latp = np.linspace(-70, 88, npart)
        runtime = timedelta(days=160)
    else:
        latp = np.linspace(-70, 70, npart)
        runtime = timedelta(days=15)

    # Kernel: wrap longitudes greater than 180 degrees back by 360.
    def periodicBC(particle, fieldSet, time):
        if particle.lon > 180:
            particle_dlon -= 360  # noqa

    pset = parcels.ParticleSet.from_list(fieldset, ptype[mode], lon=lonp, lat=latp)
    pfile = parcels.ParticleFile(outfile, pset, outputdt=timedelta(days=1))
    kernels = pset.Kernel(advection[advtype]) + periodicBC
    pset.execute(kernels, runtime=runtime, dt=timedelta(hours=6), output_file=pfile)
    # The input data are purely zonal (see filenames), so particles should
    # stay at their release latitude.
    assert np.allclose(pset.lat - latp, 0, atol=2e-2)
58
59
@pytest.mark.parametrize("mode", ["jit"])  # Only testing jit as scipy is very slow
def test_nemo_curvilinear(mode, tmpdir):
    """Test the NEMO curvilinear example."""
    run_nemo_curvilinear(mode, tmpdir.join("nemo_particles"))
65
66
def test_nemo_curvilinear_AA(tmpdir):
    """Test the NEMO curvilinear example with analytical advection."""
    run_nemo_curvilinear("scipy", tmpdir.join("nemo_particlesAA"), advtype="AA")
71
72
def test_nemo_3D_samegrid():
    """Test that the same grid is used for U and V in 3D NEMO fields."""
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    wfiles = sorted(glob(f"{data_folder}/ORCA*W.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    # All variables share the mesh-mask lon/lat and the W-file depth axis.
    datafiles = {"U": ufiles, "V": vfiles, "W": wfiles}
    filenames = {
        var: {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": files}
        for var, files in datafiles.items()
    }

    variables = {"U": "uo", "V": "vo", "W": "wo"}
    shared_dims = {
        "lon": "glamf",
        "lat": "gphif",
        "depth": "depthw",
        "time": "time_counter",
    }
    dimensions = {var: dict(shared_dims) for var in ("U", "V", "W")}

    fieldset = parcels.FieldSet.from_nemo(filenames, variables, dimensions)

    # U and W must keep their own data-file lists despite the shared grid.
    assert fieldset.U._dataFiles is not fieldset.W._dataFiles
112
113
def main(args=None):
    """Run the example with given arguments."""
    parser = ArgumentParser(description="""Chose the mode using mode option""")
    parser.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing computation",
    )
    parsed = parser.parse_args(args)

    run_nemo_curvilinear(parsed.mode, "nemo_particles")
129
130
# Allow running this example as a stand-alone script.
if __name__ == "__main__":
    main()
example_ofam.py#
1import gc
2from datetime import timedelta
3
4import numpy as np
5import parcels
6import pytest
7import xarray as xr
8
# Map execution-mode name to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
10
11
def set_ofam_fieldset(deferred_load=True, use_xarray=False):
    """Build a FieldSet from the OFAM example data.

    Loads via xarray when *use_xarray* is set, otherwise via
    FieldSet.from_netcdf; time extrapolation is allowed in both cases.
    """
    data_folder = parcels.download_example_dataset("OFAM_example_data")
    filenames = {
        "U": f"{data_folder}/OFAM_simple_U.nc",
        "V": f"{data_folder}/OFAM_simple_V.nc",
    }
    variables = {"U": "u", "V": "v"}
    dimensions = {
        "lat": "yu_ocean",
        "lon": "xu_ocean",
        "depth": "st_ocean",
        "time": "Time",
    }
    if use_xarray:
        dataset = xr.open_mfdataset(
            [filenames["U"], filenames["V"]], combine="by_coords"
        )
        return parcels.FieldSet.from_xarray_dataset(
            dataset, variables, dimensions, allow_time_extrapolation=True
        )
    return parcels.FieldSet.from_netcdf(
        filenames,
        variables,
        dimensions,
        allow_time_extrapolation=True,
        deferred_load=deferred_load,
        chunksize=False,
    )
39
40
@pytest.mark.parametrize("use_xarray", [True, False])
def test_ofam_fieldset_fillvalues(use_xarray):
    """Masked land points must be read back as zero velocity."""
    fieldset = set_ofam_fieldset(deferred_load=False, use_xarray=use_xarray)
    # V.data[0, 0, 150] is a landpoint, that makes NetCDF4 generate a masked array, instead of an ndarray
    assert fieldset.V.data[0, 0, 150] == 0
46
47
@pytest.mark.parametrize("dt", [timedelta(minutes=-5), timedelta(minutes=5)])
def test_ofam_xarray_vs_netcdf(dt):
    """Advection must give the same trajectory for netCDF- and xarray-backed fields."""
    lonstart, latstart, runtime = (180, 10, timedelta(days=7))

    # Run the identical advection once per backend and collect the endpoints.
    endpoints = []
    for use_xarray in (False, True):
        fieldset = set_ofam_fieldset(use_xarray=use_xarray)
        pset = parcels.ParticleSet(
            fieldset, pclass=parcels.JITParticle, lon=lonstart, lat=latstart
        )
        pset.execute(parcels.AdvectionRK4, runtime=runtime, dt=dt)
        endpoints.append((pset[0].lon, pset[0].lat))

    (lon_netcdf, lat_netcdf), (lon_xarray, lat_xarray) = endpoints
    assert np.allclose(lon_netcdf, lon_xarray)
    assert np.allclose(lat_netcdf, lat_xarray)
66
67
@pytest.mark.parametrize("use_xarray", [True, False])
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_ofam_particles(mode, use_xarray):
    """A particle released at (180E, 10N) should drift to about (173E, 11N) in 10 days."""
    gc.collect()
    fieldset = set_ofam_fieldset(use_xarray=use_xarray)

    # Single release at 2.5 m, the depth of the first layer in OFAM.
    pset = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=[180], lat=[10], depth=[2.5]
    )

    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=10), dt=timedelta(minutes=5)
    )

    assert abs(pset[0].lon - 173) < 1
    assert abs(pset[0].lat - 11) < 1
example_peninsula.py#
1import gc
2import math # NOQA
3from argparse import ArgumentParser
4from datetime import timedelta
5
6import numpy as np
7import parcels
8import pytest
9
# Map execution-mode names to the corresponding Parcels particle classes.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map command-line method names to the advection kernels they select.
method = {
    "RK4": parcels.AdvectionRK4,
    "EE": parcels.AdvectionEE,
    "RK45": parcels.AdvectionRK45,
}
16
17
def peninsula_fieldset(xdim, ydim, mesh="flat", grid_type="A"):
    """Construct a fieldset encapsulating the flow field around an idealised peninsula.

    Parameters
    ----------
    xdim : int
        Horizontal dimension of the generated fieldset
    ydim : int
        Vertical dimension of the generated fieldset
    mesh : str
        String indicating the type of mesh coordinates and
        units used during velocity interpolation:

        1. spherical: Lat and lon in degree, with a
           correction for zonal velocity U near the poles.
        2. flat (default): No conversion, lat/lon are assumed to be in m.
    grid_type : str
        Option whether grid is either Arakawa A (default) or C

    Notes
    -----
    The original test description can be found in Fig. 2.2.3 in:
    North, E. W., Gallego, A., Petitgas, P. (Eds). 2009. Manual of
    recommended practices for modelling physical - biological
    interactions during fish early life.
    ICES Cooperative Research Report No. 295. 111 pp.
    http://archimer.ifremer.fr/doc/00157/26792/24888.pdf
    """
    # Generate the original test setup on an A-grid in m.
    domainsizeX, domainsizeY = (1.0e5, 5.0e4)
    La = np.linspace(1e3, domainsizeX, xdim, dtype=np.float32)
    Wa = np.linspace(1e3, domainsizeY, ydim, dtype=np.float32)

    u0 = 1
    x0 = domainsizeX / 2
    R = 0.32 * domainsizeX / 2

    # Stream function: a dipole superposed on a uniform flow; land is where P >= 0.
    x, y = np.meshgrid(La, Wa, sparse=True, indexing="xy")
    P = (u0 * R**2 * y / ((x - x0) ** 2 + y**2) - u0 * y) / 1e3

    if grid_type == "A":
        # Analytical velocities evaluated at the A-grid nodes.
        U = u0 - u0 * R**2 * ((x - x0) ** 2 - y**2) / (((x - x0) ** 2 + y**2) ** 2)
        V = -2 * u0 * R**2 * ((x - x0) * y) / (((x - x0) ** 2 + y**2) ** 2)
    elif grid_type == "C":
        # C-grid: velocities are finite differences of the stream function.
        U = np.zeros(P.shape)
        V = np.zeros(P.shape)
        V[:, 1:] = P[:, 1:] - P[:, :-1]
        U[1:, :] = -(P[1:, :] - P[:-1, :])
    else:
        raise RuntimeError(f"Grid_type {grid_type} is not a valid option")

    # Mask out the land points in all three fields.
    landpoints = P >= 0.0
    for field in (P, U, V):
        field[landpoints] = np.nan

    # Convert from m to lat/lon for spherical meshes.
    lon = La / 1852.0 / 60.0 if mesh == "spherical" else La
    lat = Wa / 1852.0 / 60.0 if mesh == "spherical" else Wa

    fieldset = parcels.FieldSet.from_data(
        {"U": U, "V": V, "P": P}, {"lon": lon, "lat": lat}, mesh=mesh
    )
    if grid_type == "C":
        fieldset.U.interp_method = "cgrid_velocity"
        fieldset.V.interp_method = "cgrid_velocity"
    return fieldset
91
92
def UpdateP(particle, fieldset, time):
    """Parcels kernel: sample the stream function P at the particle position.

    On the first timestep (time == 0) the sampled value is also stored in
    ``p_start`` so advection accuracy can be checked against it later.
    """
    if time == 0:
        particle.p_start = fieldset.P[time, particle.depth, particle.lat, particle.lon]
    particle.p = fieldset.P[time, particle.depth, particle.lat, particle.lon]
97
98
def peninsula_example(
    fieldset,
    outfile,
    npart,
    mode="jit",
    degree=1,
    verbose=False,
    output=True,
    method=parcels.AdvectionRK4,
):
    """Example configuration of particle flow around an idealised Peninsula.

    Parameters
    ----------
    fieldset : parcels.FieldSet
        Fieldset to advect the particles in.
    outfile : str
        Basename of the trajectory output file.
    npart : int
        Number of particles to intialise.
    mode : str
        Particle execution mode (Default value = 'jit')
    degree : int
        (Default value = 1)
    verbose : bool
        Print particle positions before and after execution (Default value = False)
    output : bool
        Whether to write trajectory output (Default value = True)
    method :
        Advection kernel (Default value = AdvectionRK4)
    """
    # Particle class with extra variables recording the stream function at
    # release (p_start) and at the current position (p).
    MyParticle = ptype[mode].add_variable(
        [
            parcels.Variable("p", dtype=np.float32, initial=0.0),
            parcels.Variable("p_start", dtype=np.float32, initial=0),
        ]
    )

    # Release particles on a meridional line, offset 3 km from the western
    # boundary and 3 km from the northern and southern edges.
    if fieldset.U.grid.mesh == "flat":
        offset = 3000  # 3 km offset from boundary
    else:
        offset = 3.0 * (1.0 / 1.852 / 60)  # 3 km offset from boundary
    ymin = fieldset.U.lat[0] + offset
    ymax = fieldset.U.lat[-1] - offset
    pset = parcels.ParticleSet.from_line(
        fieldset,
        size=npart,
        pclass=MyParticle,
        start=(offset, ymin),
        finish=(offset, ymax),
        time=0,
    )

    if verbose:
        print(f"Initial particle positions:\n{pset}")

    # Advect the particles for 24h, sampling P along the way.
    time = timedelta(hours=24)
    dt = timedelta(minutes=5)
    out = (
        pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1)) if output else None
    )
    print("Peninsula: Advecting %d particles for %s" % (npart, str(time)))
    pset.execute(
        pset.Kernel(method) + pset.Kernel(UpdateP),
        runtime=time,
        dt=dt,
        output_file=out,
    )

    if verbose:
        print(f"Final particle positions:\n{pset}")

    return pset
177
178
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_peninsula_fieldset(mode, mesh, tmpdir):
    """Execute peninsula test from fieldset generated in memory."""
    fieldset = peninsula_fieldset(100, 50, mesh)
    outfile = tmpdir.join("Peninsula")
    pset = peninsula_example(fieldset, outfile, 5, mode=mode, degree=1)
    # Advection accuracy: particles should stay on their initial streamline.
    assert (np.abs(pset.p_start - pset.p) <= 1.0e-3).all()
    # Sampling accuracy: kernel-sampled P must match direct Field sampling.
    for i in range(pset.size):
        direct = pset.fieldset.P[0.0, pset.depth[i], pset.lat[i], pset.lon[i]]
        assert abs(pset.p[i] - direct) <= 1.0e-3
200
201
@pytest.mark.parametrize(
    "mode", ["scipy"]
)  # Analytical Advection only implemented in Scipy mode
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_peninsula_fieldset_AnalyticalAdvection(mode, mesh, tmpdir):
    """Execute peninsula test using Analytical Advection on C grid."""
    # NOTE(review): the parametrized `mesh` is not forwarded; the fieldset is
    # always built with a flat mesh — confirm whether that is intentional.
    fieldset = peninsula_fieldset(101, 51, "flat", grid_type="C")
    outfile = tmpdir.join("PeninsulaAA")
    pset = peninsula_example(
        fieldset,
        outfile,
        npart=10,
        mode=mode,
        method=parcels.AdvectionAnalytical,
    )
    # Advection accuracy: compare stream-function values before and after.
    err_adv = np.array([abs(p.p_start - p.p) for p in pset])

    tolerance = {"scipy": 3.0e-1, "jit": 1.0e-1}.get(mode)
    assert (err_adv <= tolerance).all()
218
219
def fieldsetfile(mesh, tmpdir):
    """Write a 100x50 peninsula fieldset to disk and return its filename."""
    filename = tmpdir.join("peninsula")
    peninsula_fieldset(100, 50, mesh=mesh).write(filename)
    return filename
226
227
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_peninsula_file(mode, mesh, tmpdir):
    """Open fieldset files and execute."""
    gc.collect()
    fieldset = parcels.FieldSet.from_parcels(
        fieldsetfile(mesh, tmpdir),
        extra_fields={"P": "P"},
        allow_time_extrapolation=True,
    )
    outfile = tmpdir.join("Peninsula")
    pset = peninsula_example(fieldset, outfile, 5, mode=mode, degree=1)
    # Advection accuracy: particles should stay on their initial streamline.
    assert (np.abs(pset.p_start - pset.p) <= 1.0e-3).all()
    # Sampling accuracy: kernel-sampled P must match direct Field sampling.
    for i in range(pset.size):
        direct = pset.fieldset.P[0.0, pset.depth[i], pset.lat[i], pset.lon[i]]
        assert abs(pset.p[i] - direct) <= 1.0e-3
254
255
def main(args=None):
    """Parse command-line options and run the peninsula example.

    Builds (or regenerates with ``-f``) the fieldset files on disk, reads
    them back, and advects particles, optionally under the cProfile profiler.
    """
    p = ArgumentParser(
        description="""
Example of particle advection around an idealised peninsula"""
    )
    p.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing RK4 computation",
    )
    p.add_argument(
        "-p", "--particles", type=int, default=20, help="Number of particles to advect"
    )
    p.add_argument(
        "-d", "--degree", type=int, default=1, help="Degree of spatial interpolation"
    )
    p.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        help="Print particle information before and after execution",
    )
    p.add_argument(
        "-o",
        "--nooutput",
        action="store_true",
        default=False,
        help="Suppress trajectory output",
    )
    p.add_argument(
        "--profiling",
        action="store_true",
        default=False,
        help="Print profiling information after run",
    )
    p.add_argument(
        "-f",
        "--fieldset",
        type=int,
        nargs=2,
        default=None,
        help="Generate fieldset file with given dimensions",
    )
    p.add_argument(
        "-m",
        "--method",
        choices=("RK4", "EE", "RK45"),
        default="RK4",
        help="Numerical method used for advection",
    )
    args = p.parse_args(args)

    filename = "peninsula"
    # Build the fieldset (at the requested resolution, or 100x50 by default)
    # and write it to disk so the run below exercises the file-based path.
    if args.fieldset is not None:
        fieldset = peninsula_fieldset(args.fieldset[0], args.fieldset[1], mesh="flat")
    else:
        fieldset = peninsula_fieldset(100, 50, mesh="flat")
    fieldset.write(filename)

    # Open fieldset file set
    fieldset = parcels.FieldSet.from_parcels(
        "peninsula", extra_fields={"P": "P"}, allow_time_extrapolation=True
    )
    outfile = "Peninsula"

    if args.profiling:
        from cProfile import runctx
        from pstats import Stats

        # NOTE: the profiled call is given as a source string, so it must keep
        # referring to the local names `fieldset`, `outfile` and `args`.
        runctx(
            "peninsula_example(fieldset, outfile, args.particles, mode=args.mode,\
            degree=args.degree, verbose=args.verbose,\
            output=not args.nooutput, method=method[args.method])",
            globals(),
            locals(),
            "Profile.prof",
        )
        Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
    else:
        peninsula_example(
            fieldset,
            outfile,
            args.particles,
            mode=args.mode,
            degree=args.degree,
            verbose=args.verbose,
            output=not args.nooutput,
            method=method[args.method],
        )
348
349
# Allow running this example directly as a script.
if __name__ == "__main__":
    main()
example_radial_rotation.py#
1import math
2from datetime import timedelta
3
4import numpy as np
5import parcels
6import pytest
7
# Map execution-mode names to the corresponding Parcels particle classes.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
9
10
def radial_rotation_fieldset(xdim=200, ydim=200):
    """Define a 2D flat, square fieldset of solid-body rotation about its centre."""
    lon = np.linspace(0, 60, xdim, dtype=np.float32)
    lat = np.linspace(0, 60, ydim, dtype=np.float32)

    # Origin of the rotation: the centre of the Field.
    x0, y0 = 30.0, 30.0

    U = np.zeros((ydim, xdim), dtype=np.float32)
    V = np.zeros((ydim, xdim), dtype=np.float32)

    # Rotational period of 1 day gives the angular velocity omega.
    T = timedelta(days=1)
    omega = 2 * np.pi / T.total_seconds()

    for i, xi in enumerate(lon):
        for j, yj in enumerate(lat):
            # Radial displacement of this grid point from the origin.
            r = np.sqrt((xi - x0) ** 2 + (yj - y0) ** 2)
            assert r >= 0.0
            assert r <= np.sqrt(x0**2 + y0**2)

            # Polar angle of this grid point.
            theta = math.atan2((yj - y0), (xi - x0))
            assert abs(theta) <= np.pi

            # Tangential velocity components of solid-body rotation.
            U[j, i] = r * math.sin(theta) * omega
            V[j, i] = -r * math.cos(theta) * omega

    return parcels.FieldSet.from_data(
        {"U": U, "V": V}, {"lon": lon, "lat": lat}, mesh="flat"
    )
43
44
def true_values(age):
    """Analytic [x, y] position of the peripheral particle after ``age`` seconds.

    The particle starts at (30, 50) and rotates around (30, 30) at radius 20
    with a period of one day (86400 s).
    """
    phase = 2 * np.pi * age / (24.0 * 60.0**2)
    return [20 * math.sin(phase) + 30.0, 20 * math.cos(phase) + 30.0]
50
51
def rotation_example(fieldset, outfile, mode="jit", method=parcels.AdvectionRK4):
    """Advect two particles: one at the centre of the Field, one on its periphery."""
    pset = parcels.ParticleSet.from_line(
        fieldset,
        size=2,  # two particles: one in centre, one on periphery of Field
        pclass=ptype[mode],
        start=(30.0, 30.0),
        finish=(30.0, 50.0),
    )

    # 17 hours of advection with 5-minute steps, hourly output.
    pset.execute(
        method,
        runtime=timedelta(hours=17),
        dt=timedelta(minutes=5),
        output_file=pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1)),
    )

    return pset
74
75
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_rotation_example(mode, tmpdir):
    """Advected positions must match the analytic solid-body rotation."""
    fieldset = radial_rotation_fieldset()
    outfile = tmpdir.join("RadialParticle")
    pset = rotation_example(fieldset, outfile, mode=mode)
    # The particle at the centre of the Field remains stationary.
    assert pset[0].lon == 30.0
    assert pset[0].lat == 30.0
    # The peripheral particle must follow the analytic circle.
    expected_x, expected_y = true_values(pset.time[1])
    assert np.allclose(pset[1].lon, expected_x, 1e-5)
    assert np.allclose(pset[1].lat, expected_y, 1e-5)
89
90
def main():
    """Write the radial-rotation fieldset to disk and run the example."""
    fieldset = radial_rotation_fieldset()
    fieldset.write("radial_rotation")
    rotation_example(fieldset, "RadialParticle")
98
99
# Allow running this example directly as a script.
if __name__ == "__main__":
    main()
example_stommel.py#
1import math
2from argparse import ArgumentParser
3from datetime import timedelta
4
5import numpy as np
6import parcels
7import pytest
8
# Map execution-mode names to the corresponding Parcels particle classes.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map command-line method names to the advection kernels they select.
method = {
    "RK4": parcels.AdvectionRK4,
    "EE": parcels.AdvectionEE,
    "RK45": parcels.AdvectionRK45,
}
15
16
def stommel_fieldset(xdim=200, ydim=200, grid_type="A"):
    """Simulate a periodic current along a western boundary, with significantly
    larger velocities along the western edge than the rest of the region.

    The original test description can be found in: N. Fabbroni, 2009,
    Numerical Simulation of Passive tracers dispersion in the sea,
    Ph.D. dissertation, University of Bologna
    http://amsdottorato.unibo.it/1733/1/Fabbroni_Nicoletta_Tesi.pdf
    """
    a = b = 10000 * 1e3  # domain extent (m)
    scalefac = 0.05  # to scale for physically meaningful velocities
    dx, dy = a / xdim, b / ydim

    # Coordinates of the test fieldset (on A-grid in deg)
    lon = np.linspace(0, a, xdim, dtype=np.float32)
    lat = np.linspace(0, b, ydim, dtype=np.float32)

    # Arrays for U (zonal), V (meridional) and P (sea surface height).
    U = np.zeros((lat.size, lon.size), dtype=np.float32)
    V = np.zeros((lat.size, lon.size), dtype=np.float32)
    P = np.zeros((lat.size, lon.size), dtype=np.float32)

    beta = 2e-11
    r = 1 / (11.6 * 86400)
    es = r / (beta * a)  # non-dimensional boundary-layer width

    for j in range(lat.size):
        yi = lat[j] / b  # non-dimensional meridional coordinate
        for i in range(lon.size):
            xi = lon[i] / a  # non-dimensional zonal coordinate
            P[j, i] = (
                (1 - math.exp(-xi / es) - xi)
                * math.pi
                * np.sin(math.pi * yi)
                * scalefac
            )
            if grid_type == "A":
                # Analytical velocities on the A-grid nodes.
                U[j, i] = (
                    -(1 - math.exp(-xi / es) - xi)
                    * math.pi**2
                    * np.cos(math.pi * yi)
                    * scalefac
                )
                V[j, i] = (
                    (math.exp(-xi / es) / es - 1)
                    * math.pi
                    * np.sin(math.pi * yi)
                    * scalefac
                )
    if grid_type == "C":
        # C-grid: velocities as finite differences of the surface height.
        V[:, 1:] = (P[:, 1:] - P[:, 0:-1]) / dx * a
        U[1:, :] = -(P[1:, :] - P[0:-1, :]) / dy * b

    fieldset = parcels.FieldSet.from_data(
        {"U": U, "V": V, "P": P}, {"lon": lon, "lat": lat}, mesh="flat"
    )
    if grid_type == "C":
        fieldset.U.interp_method = "cgrid_velocity"
        fieldset.V.interp_method = "cgrid_velocity"
    return fieldset
77
78
def UpdateP(particle, fieldset, time):
    """Parcels kernel: sample the stream function P at the particle position.

    On the first timestep (time == 0) the sampled value is also stored in
    ``p_start`` so advection accuracy can be checked against it later.
    """
    if time == 0:
        particle.p_start = fieldset.P[time, particle.depth, particle.lat, particle.lon]
    particle.p = fieldset.P[time, particle.depth, particle.lat, particle.lon]
83
84
def AgeP(particle, fieldset, time):
    """Parcels kernel: age the particle by dt and delete it once it exceeds fieldset.maxage."""
    particle.age += particle.dt
    if particle.age > fieldset.maxage:
        particle.delete()
89
90
def simple_partition_function(coords, mpi_size=1):
    """Assign each of the ``coords.shape[0]`` particles an MPI rank by spacing
    rank ids evenly over the particle index range (for MPI testing purposes).
    """
    n_particles = coords.shape[0]
    return np.linspace(0, mpi_size, n_particles, endpoint=False, dtype=np.int32)
94
95
def stommel_example(
    npart=1,
    mode="jit",
    verbose=False,
    method=parcels.AdvectionRK4,
    grid_type="A",
    outfile="StommelParticle.zarr",
    repeatdt=None,
    maxage=None,
    write_fields=True,
    custom_partition_function=False,
):
    """Advect particles in the steady Stommel-gyre solution.

    Parameters
    ----------
    npart : int
        Number of particles released on a meridional line near the western boundary.
    mode : str
        'jit' or 'scipy' execution mode.
    verbose : bool
        Print particle positions before and after execution.
    method :
        Advection kernel (default parcels.AdvectionRK4).
    grid_type : str
        'A' or 'C' grid discretisation of the fieldset.
    outfile : str
        Name of the particle output file.
    repeatdt :
        Interval at which the release is repeated (None for a single release).
    maxage :
        Maximum particle age in seconds before deletion (defaults to the runtime).
    write_fields : bool
        Also write the hydrodynamic fields to disk.
    custom_partition_function : bool
        Distribute particles over MPI ranks with ``simple_partition_function``.

    Returns
    -------
    parcels.ParticleSet
        The particle set after execution.
    """
    parcels.timer.fieldset = parcels.timer.Timer(
        "FieldSet", parent=parcels.timer.stommel
    )
    fieldset = stommel_fieldset(grid_type=grid_type)
    if write_fields:
        filename = "stommel"
        fieldset.write(filename)
    parcels.timer.fieldset.stop()

    # Determine particle class according to mode
    parcels.timer.pset = parcels.timer.Timer("Pset", parent=parcels.timer.stommel)
    parcels.timer.psetinit = parcels.timer.Timer("Pset_init", parent=parcels.timer.pset)
    ParticleClass = parcels.JITParticle if mode == "jit" else parcels.ScipyParticle

    # Execute for 600 days, with 1-hour timesteps and 5-day output
    runtime = timedelta(days=600)
    dt = timedelta(hours=1)
    outputdt = timedelta(days=5)

    extra_vars = [
        parcels.Variable("p", dtype=np.float32, initial=0.0),
        parcels.Variable("p_start", dtype=np.float32, initial=0.0),
        # next_dt is required by the adaptive-step RK45 kernel.
        parcels.Variable("next_dt", dtype=np.float64, initial=dt.total_seconds()),
        parcels.Variable("age", dtype=np.float32, initial=0.0),
    ]
    MyParticle = ParticleClass.add_variables(extra_vars)

    # Build the ParticleSet once; the two previous call sites differed only in
    # the optional partition_function, so pass it conditionally instead.
    pset_kwargs = dict(
        size=npart,
        pclass=MyParticle,
        repeatdt=repeatdt,
        start=(10e3, 5000e3),
        finish=(100e3, 5000e3),
        time=0,
    )
    if custom_partition_function:
        pset_kwargs["partition_function"] = simple_partition_function
    pset = parcels.ParticleSet.from_line(fieldset, **pset_kwargs)

    if verbose:
        print(f"Initial particle positions:\n{pset}")

    maxage = runtime.total_seconds() if maxage is None else maxage
    fieldset.add_constant("maxage", maxage)
    print("Stommel: Advecting %d particles for %s" % (npart, runtime))
    parcels.timer.psetinit.stop()
    parcels.timer.psetrun = parcels.timer.Timer("Pset_run", parent=parcels.timer.pset)
    pset.execute(
        method + pset.Kernel(UpdateP) + pset.Kernel(AgeP),
        runtime=runtime,
        dt=dt,
        output_file=pset.ParticleFile(name=outfile, outputdt=outputdt),
    )

    if verbose:
        print(f"Final particle positions:\n{pset}")
    parcels.timer.psetrun.stop()
    parcels.timer.pset.stop()

    return pset
178
179
@pytest.mark.parametrize("grid_type", ["A", "C"])
@pytest.mark.parametrize("mode", ["jit", "scipy"])
def test_stommel_fieldset(mode, grid_type, tmpdir):
    """RK4 and RK45 must agree, stay on streamlines, and match direct sampling."""
    parcels.timer.root = parcels.timer.Timer("Main")
    parcels.timer.stommel = parcels.timer.Timer("Stommel", parent=parcels.timer.root)
    outfile = tmpdir.join("StommelParticle")
    psetRK4 = stommel_example(
        1,
        mode=mode,
        method=method["RK4"],
        grid_type=grid_type,
        outfile=outfile,
        write_fields=False,
    )
    psetRK45 = stommel_example(
        1,
        mode=mode,
        method=method["RK45"],
        grid_type=grid_type,
        outfile=outfile,
        write_fields=False,
    )
    # Fixed-step and adaptive-step integrators should agree closely.
    assert np.allclose(psetRK4.lon, psetRK45.lon, rtol=1e-3)
    assert np.allclose(psetRK4.lat, psetRK45.lat, rtol=1.1e-3)
    # Advection accuracy: particles should stay on their initial streamline.
    err_adv = np.abs(psetRK4.p_start - psetRK4.p)
    assert (err_adv <= 1.0e-1).all()
    # Sampling accuracy: kernel-sampled P must match direct Field sampling.
    # BUGFIX: Field access order is [time, depth, lat, lon] (as in UpdateP and
    # the peninsula tests); the arguments were previously passed as
    # [time, lon, lat, depth].
    err_smpl = np.array(
        [
            abs(
                psetRK4.p[i]
                - psetRK4.fieldset.P[
                    0.0, psetRK4.depth[i], psetRK4.lat[i], psetRK4.lon[i]
                ]
            )
            for i in range(psetRK4.size)
        ]
    )
    assert (err_smpl <= 1.0e-1).all()
    parcels.timer.stommel.stop()
    parcels.timer.root.stop()
    parcels.timer.root.print_tree()
221
222
def _parse_bool(value):
    """Interpret a command-line string as a boolean.

    'false', 'no', '0' and the empty string (case-insensitive) map to False;
    any other string maps to True; an actual bool passes through unchanged.
    """
    if isinstance(value, bool):
        return value
    return value.strip().lower() not in ("false", "no", "0", "")


def main(args=None):
    """Parse command-line options and run the Stommel gyre example."""
    parcels.timer.root = parcels.timer.Timer("Main")
    parcels.timer.args = parcels.timer.Timer("Args", parent=parcels.timer.root)
    p = ArgumentParser(
        description="""
Example of particle advection in the steady-state solution of the Stommel equation"""
    )
    p.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing computation",
    )
    p.add_argument(
        "-p", "--particles", type=int, default=1, help="Number of particles to advect"
    )
    p.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        help="Print particle information before and after execution",
    )
    p.add_argument(
        "-m",
        "--method",
        choices=("RK4", "EE", "RK45"),
        default="RK4",
        help="Numerical method used for advection",
    )
    p.add_argument(
        "-o", "--outfile", default="StommelParticle.zarr", help="Name of output file"
    )
    p.add_argument(
        "-r", "--repeatdt", default=None, type=int, help="repeatdt of the ParticleSet"
    )
    p.add_argument(
        "-a",
        "--maxage",
        default=None,
        type=int,
        help="max age of the particles (after which particles are deleted)",
    )
    # BUGFIX: without an explicit type, the value arrived as a raw string, so
    # even "-wf False" was truthy and the flags could never be switched.
    p.add_argument(
        "-wf",
        "--write_fields",
        default=True,
        type=_parse_bool,
        help="Write the hydrodynamic fields to NetCDF",
    )
    p.add_argument(
        "-cpf",
        "--custom_partition_function",
        default=False,
        type=_parse_bool,
        help="Use a custom partition_function (for MPI testing purposes)",
    )
    args = p.parse_args(args)

    parcels.timer.args.stop()
    parcels.timer.stommel = parcels.timer.Timer("Stommel", parent=parcels.timer.root)
    stommel_example(
        args.particles,
        mode=args.mode,
        verbose=args.verbose,
        method=method[args.method],
        outfile=args.outfile,
        repeatdt=args.repeatdt,
        maxage=args.maxage,
        write_fields=args.write_fields,
        custom_partition_function=args.custom_partition_function,
    )
    parcels.timer.stommel.stop()
    parcels.timer.root.stop()
    parcels.timer.root.print_tree()
297
298
# Allow running this example directly as a script.
if __name__ == "__main__":
    main()