Python Example Scripts#
example_brownian.py#
1from datetime import timedelta
2
3import numpy as np
4import pytest
5
6import parcels
7
# Map the execution-mode name used in test parametrization ("scipy"/"jit")
# to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
9
10
def mesh_conversion(mesh):
    """Return the factor that converts this mesh's coordinates to metres.

    A "spherical" mesh is in degrees, so one degree of latitude equals
    1852 m (one nautical mile) times 60 minutes; any other mesh type
    (i.e. "flat") is already in metres and needs no scaling.
    """
    if mesh == "spherical":
        return 1852.0 * 60
    return 1.0
13
14
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_brownian_example(mode, mesh, npart=3000):
    """Check that uniform-Kh diffusion reproduces Brownian-motion statistics.

    With zero background flow and constant diffusivity, a cloud of ``npart``
    particles advected for one day should have (approximately) zero mean
    displacement and a standard deviation of sqrt(2 * Kh * t) in each
    direction.
    """
    fieldset = parcels.FieldSet.from_data(
        {"U": 0, "V": 0}, {"lon": 0, "lat": 0}, mesh=mesh
    )

    # Set diffusion constants.
    kh_zonal = 100  # in m^2/s
    kh_meridional = 100  # in m^2/s

    # Create fields of constant Kh_zonal and Kh_meridional.
    fieldset.add_field(parcels.Field("Kh_zonal", kh_zonal, lon=0, lat=0, mesh=mesh))
    fieldset.add_field(
        parcels.Field("Kh_meridional", kh_meridional, lon=0, lat=0, mesh=mesh)
    )

    runtime = timedelta(days=1)

    # Seed the RNG once so the test is deterministic. (The original code
    # seeded twice — seed(123456) followed by seed(1234) — so the first
    # call was dead code and has been removed.)
    parcels.ParcelsRandom.seed(1234)
    pset = parcels.ParticleSet(
        fieldset=fieldset, pclass=ptype[mode], lon=np.zeros(npart), lat=np.zeros(npart)
    )
    pset.execute(
        pset.Kernel(parcels.DiffusionUniformKh), runtime=runtime, dt=timedelta(hours=1)
    )

    # Expected spread of a Brownian walk after time t: std = sqrt(2 * Kh * t).
    expected_std_x = np.sqrt(2 * kh_zonal * runtime.total_seconds())
    expected_std_y = np.sqrt(2 * kh_meridional * runtime.total_seconds())

    ys = pset.lat * mesh_conversion(mesh)
    xs = pset.lon * mesh_conversion(
        mesh
    )  # since near equator, we do not need to care about curvature effect

    tol = 250  # 250m tolerance
    assert np.allclose(np.std(xs), expected_std_x, atol=tol)
    assert np.allclose(np.std(ys), expected_std_y, atol=tol)
    assert np.allclose(np.mean(xs), 0, atol=tol)
    assert np.allclose(np.mean(ys), 0, atol=tol)
58
59
if __name__ == "__main__":
    # Allow running a single configuration directly, outside of pytest.
    test_brownian_example("jit", "spherical", npart=2000)
example_dask_chunk_OCMs.py#
1import math
2from datetime import timedelta
3from glob import glob
4
5import dask
6import numpy as np
7import pytest
8
9import parcels
10from parcels.tools.statuscodes import DaskChunkingError
11
# Map the execution-mode name used in test parametrization ("scipy"/"jit")
# to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
13
14
def compute_nemo_particle_advection(fieldset, mode):
    """Advect 20 particles for 4 days in a NEMO North Sea fieldset.

    Particles start at 2.5E with a +/- 1e-3 degree random jitter around 52N
    and are advected with RK4 plus a boundary kernel that wraps longitude
    into [0, 15] and latitude into [49, 60]. Returns the ParticleSet.
    """
    npart = 20
    lonp = 2.5 * np.ones(npart)
    # list(...) instead of the original identity comprehension [i for i in ...].
    latp = list(52.0 + (-1e-3 + np.random.rand(npart) * 2.0 * 1e-3))

    # NOTE: this kernel is parsed/compiled by parcels; its body is kept
    # exactly as written (particle_dlon/particle_dlat are kernel-injected
    # names, hence the noqa markers).
    def periodicBC(particle, fieldSet, time):  # pragma: no cover
        if particle.lon > 15.0:
            particle_dlon -= 15.0  # noqa
        if particle.lon < 0:
            particle_dlon += 15.0
        if particle.lat > 60.0:
            particle_dlat -= 11.0  # noqa
        if particle.lat < 49.0:
            particle_dlat += 11.0

    pset = parcels.ParticleSet.from_list(fieldset, ptype[mode], lon=lonp, lat=latp)
    kernels = pset.Kernel(parcels.AdvectionRK4) + periodicBC
    pset.execute(kernels, runtime=timedelta(days=4), dt=timedelta(hours=6))
    return pset
34
35
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_nemo_3D(mode, chunk_mode):
    """Check dask chunking of a 3D NEMO fieldset under several chunk modes.

    Modes: ``False`` (no chunking), ``"auto"`` (dask chooses chunk sizes),
    ``"specific"`` (explicit per-dimension chunks), ``"failsafe"``
    (U/V only, with deliberately mismatched depth chunk settings).
    """
    if chunk_mode in [
        "auto",
    ]:
        # Small chunk budget so that "auto" actually yields multiple chunks.
        dask.config.set({"array.chunk-size": "256KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    wfiles = sorted(glob(f"{data_folder}/ORCA*W.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": vfiles},
        "W": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": wfiles},
    }
    variables = {"U": "uo", "V": "vo", "W": "wo"}
    dimensions = {
        "U": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "V": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "W": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
    }
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "U": {"depth": ("depthu", 75), "lat": ("y", 16), "lon": ("x", 16)},
            "V": {"depth": ("depthv", 75), "lat": ("y", 16), "lon": ("x", 16)},
            "W": {"depth": ("depthw", 75), "lat": ("y", 16), "lon": ("x", 16)},
        }
    elif chunk_mode == "failsafe":  # chunking time but not depth
        # Restrict to U and V only for the failsafe case.
        filenames = {
            "U": {
                "lon": mesh_mask,
                "lat": mesh_mask,
                "depth": wfiles[0],
                "data": ufiles,
            },
            "V": {
                "lon": mesh_mask,
                "lat": mesh_mask,
                "depth": wfiles[0],
                "data": vfiles,
            },
        }
        variables = {"U": "uo", "V": "vo"}
        dimensions = {
            "U": {
                "lon": "glamf",
                "lat": "gphif",
                "depth": "depthw",
                "time": "time_counter",
            },
            "V": {
                "lon": "glamf",
                "lat": "gphif",
                "depth": "depthw",
                "time": "time_counter",
            },
        }
        chs = {
            "U": {
                "time": ("time_counter", 1),
                "depth": ("depthu", 25),
                "lat": ("y", -1),
                "lon": ("x", -1),
            },
            "V": {
                "time": ("time_counter", 1),
                "depth": ("depthv", 75),
                "lat": ("y", -1),
                "lon": ("x", -1),
            },
        }

    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chs
    )

    compute_nemo_particle_advection(fieldset, mode)
    # Nemo sample file dimensions: depthu=75, y=201, x=151
    if chunk_mode != "failsafe":
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.W.grid._load_chunk)
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert (
            fieldset.gridset.size == 3
        )  # because three different grids in 'auto' mode
        assert len(fieldset.U.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        # Expected chunk count = product of ceil(dim / chunk) per dimension.
        assert len(fieldset.U.grid._load_chunk) == (
            1
            * int(math.ceil(75.0 / 75.0))
            * int(math.ceil(201.0 / 16.0))
            * int(math.ceil(151.0 / 16.0))
        )
    elif chunk_mode == "failsafe":  # chunking time and depth but not lat and lon
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.U.grid._load_chunk) == (
            1
            * int(math.ceil(75.0 / 25.0))
            * int(math.ceil(201.0 / 171.0))
            * int(math.ceil(151.0 / 151.0))
        )
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) == (
            1
            * int(math.ceil(75.0 / 75.0))
            * int(math.ceil(201.0 / 171.0))
            * int(math.ceil(151.0 / 151.0))
        )
169
170
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_globcurrent_2D(mode, chunk_mode):
    """Check dask chunking of the 2D GlobCurrent fieldset.

    In "failsafe" mode the chunk dimensions are deliberately incorrect, so
    parcels is expected to raise ``DaskChunkingError``; any other mode must
    run cleanly and advect the particle to roughly (23.8E, -35.3N).
    """
    if chunk_mode in [
        "auto",
    ]:
        # Small chunk budget so that "auto" actually yields multiple chunks.
        dask.config.set({"array.chunk-size": "16KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    filenames = str(
        data_folder / "200201*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc"
    )
    variables = {
        "U": "eastward_eulerian_current_velocity",
        "V": "northward_eulerian_current_velocity",
    }
    dimensions = {"lat": "lat", "lon": "lon", "time": "time"}
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "U": {"lat": ("lat", 8), "lon": ("lon", 8)},
            "V": {"lat": ("lat", 8), "lon": ("lon", 8)},
        }
    elif chunk_mode == "failsafe":  # chunking time but not lat
        chs = {
            "U": {"time": ("time", 1), "lat": ("lat", 10), "lon": ("lon", -1)},
            "V": {"time": ("time", 1), "lat": ("lat", -1), "lon": ("lon", -1)},
        }

    fieldset = parcels.FieldSet.from_netcdf(
        filenames, variables, dimensions, chunksize=chs
    )
    try:
        pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
        pset.execute(
            parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
        )
    except DaskChunkingError:
        # we removed the failsafe, so now if all chunksize dimensions are incorrect, there is nothing left to chunk,
        # which raises an error saying so. This is the expected behaviour
        if chunk_mode == "failsafe":
            return
        # For any other mode this error is unexpected: re-raise it instead of
        # falling through to the asserts below with `pset` unbound (which
        # would previously surface as a confusing NameError).
        raise
    # GlobCurrent sample file dimensions: time=UNLIMITED, lat=41, lon=81
    if chunk_mode != "failsafe":  # chunking time but not lat
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert len(fieldset.U.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        assert len(fieldset.U.grid._load_chunk) == (
            1 * int(math.ceil(41.0 / 8.0)) * int(math.ceil(81.0 / 8.0))
        )
    elif chunk_mode == "failsafe":  # chunking time but not lat
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
    assert abs(pset[0].lon - 23.8) < 1
    assert abs(pset[0].lat - -35.3) < 1
232
233
@pytest.mark.skip(
    reason="Started failing around #1644 (2024-08-08). Some change in chunking, inconsistent behavior."
)
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_pop(mode, chunk_mode):
    """Check dask chunking of a POP Southern Ocean fieldset.

    The "failsafe" variant requests a depth chunk under a wrong dimension
    name ("wz"), which parcels is expected to recover from by falling back
    to the same chunking as the "specific" case.
    """
    if chunk_mode in [
        "auto",
    ]:
        # Small chunk budget so that "auto" actually yields multiple chunks.
        dask.config.set({"array.chunk-size": "256KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("POPSouthernOcean_data")
    filenames = str(data_folder / "t.x1_SAMOC_flux.1690*.nc")
    variables = {"U": "UVEL", "V": "VVEL", "W": "WVEL"}
    # Monthly timestamps supplied explicitly (shape (6, 1)) since the files
    # themselves do not carry a usable time dimension here.
    timestamps = np.expand_dims(
        np.array([np.datetime64(f"2000-{m:02d}-01") for m in range(1, 7)]), axis=1
    )
    dimensions = {"lon": "ULON", "lat": "ULAT", "depth": "w_dep"}
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {"lon": ("i", 8), "lat": ("j", 8), "depth": ("k", 3)}
    elif chunk_mode == "failsafe":  # here: bad depth entry
        chs = {"depth": ("wz", 3), "lat": ("j", 8), "lon": ("i", 8)}

    fieldset = parcels.FieldSet.from_pop(
        filenames, variables, dimensions, chunksize=chs, timestamps=timestamps
    )

    npart = 20
    lonp = 70.0 * np.ones(npart)
    latp = [i for i in -45.0 + (-0.25 + np.random.rand(npart) * 2.0 * 0.25)]
    pset = parcels.ParticleSet.from_list(fieldset, ptype[mode], lon=lonp, lat=latp)
    pset.execute(parcels.AdvectionRK4, runtime=timedelta(days=90), dt=timedelta(days=2))
    # POP sample file dimensions: k=21, j=60, i=60
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.W.grid._load_chunk)
    if chunk_mode is False:
        assert fieldset.gridset.size == 1
        assert len(fieldset.U.grid._load_chunk) == 1
        assert len(fieldset.V.grid._load_chunk) == 1
        assert len(fieldset.W.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert (
            fieldset.gridset.size == 3
        )  # because three different grids in 'auto' mode
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.W.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        assert fieldset.gridset.size == 1
        # Expected chunk count = product of ceil(dim / chunk) per dimension.
        assert len(fieldset.U.grid._load_chunk) == (
            int(math.ceil(21.0 / 3.0))
            * int(math.ceil(60.0 / 8.0))
            * int(math.ceil(60.0 / 8.0))
        )
    elif chunk_mode == "failsafe":  # here: done a typo in the netcdf dimname field
        assert fieldset.gridset.size == 1
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.W.grid._load_chunk) != 1
        assert len(fieldset.U.grid._load_chunk) == (
            int(math.ceil(21.0 / 3.0))
            * int(math.ceil(60.0 / 8.0))
            * int(math.ceil(60.0 / 8.0))
        )
302
303
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific", "failsafe"])
def test_swash(mode, chunk_mode):
    """Check dask chunking of a SWASH fieldset with time-varying depth fields.

    U/V/W get their depth dimension attached afterwards via
    ``set_depth_from_field``; the "failsafe" variant deliberately mismatches
    the depth chunk dimension name for U.
    """
    if chunk_mode in [
        "auto",
    ]:
        # Small chunk budget so that "auto" actually yields multiple chunks.
        dask.config.set({"array.chunk-size": "32KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("SWASH_data")
    filenames = str(data_folder / "field_*.nc")
    variables = {
        "U": "cross-shore velocity",
        "V": "along-shore velocity",
        "W": "vertical velocity",
        "depth_w": "time varying depth",
        "depth_u": "time varying depth_u",
    }
    # Depth dimension names are attached later via set_depth_from_field.
    dimensions = {
        "U": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "V": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "W": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "depth_w": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
        "depth_u": {"lon": "x", "lat": "y", "depth": "not_yet_set", "time": "t"},
    }
    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "U": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
            "V": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
            "W": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_w": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_u": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
        }
    elif (
        chunk_mode == "failsafe"
    ):  # here: incorrect matching between dimension names and their attachment to the NC-variables
        chs = {
            "U": {"depth": ("depth", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "V": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
            "W": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_w": {"depth": ("z", 7), "lat": ("y", 4), "lon": ("x", 4)},
            "depth_u": {"depth": ("z_u", 6), "lat": ("y", 4), "lon": ("x", 4)},
        }
    fieldset = parcels.FieldSet.from_netcdf(
        filenames,
        variables,
        dimensions,
        mesh="flat",
        allow_time_extrapolation=True,
        chunksize=chs,
    )
    # Attach the time-varying depth fields to the velocity fields.
    fieldset.U.set_depth_from_field(fieldset.depth_u)
    fieldset.V.set_depth_from_field(fieldset.depth_u)
    fieldset.W.set_depth_from_field(fieldset.depth_w)

    npart = 20
    lonp = [i for i in 9.5 + (-0.2 + np.random.rand(npart) * 2.0 * 0.2)]
    latp = [i for i in np.arange(start=12.3, stop=13.1, step=0.04)[0:20]]
    depthp = [
        -0.1,
    ] * npart
    pset = parcels.ParticleSet.from_list(
        fieldset, ptype[mode], lon=lonp, lat=latp, depth=depthp
    )
    pset.execute(
        parcels.AdvectionRK4,
        runtime=timedelta(seconds=0.2),
        dt=timedelta(seconds=0.005),
    )
    # SWASH sample file dimensions: t=1, z=7, z_u=6, y=21, x=51
    if chunk_mode not in [
        "failsafe",
    ]:
        assert len(fieldset.U.grid._load_chunk) == len(
            fieldset.V.grid._load_chunk
        ), f"U {fieldset.U.grid.chunk_info} vs V {fieldset.V.grid.chunk_info}"
    if chunk_mode not in ["failsafe", "auto"]:
        assert len(fieldset.U.grid._load_chunk) == len(
            fieldset.W.grid._load_chunk
        ), f"U {fieldset.U.grid.chunk_info} vs W {fieldset.W.grid.chunk_info}"
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    else:
        assert len(fieldset.U.grid._load_chunk) != 1
        assert len(fieldset.V.grid._load_chunk) != 1
        assert len(fieldset.W.grid._load_chunk) != 1
        if chunk_mode == "specific":
            # Expected chunk count = product of ceil(dim / chunk) per dimension.
            assert len(fieldset.U.grid._load_chunk) == (
                1
                * int(math.ceil(6.0 / 6.0))
                * int(math.ceil(21.0 / 4.0))
                * int(math.ceil(51.0 / 4.0))
            )
            assert len(fieldset.V.grid._load_chunk) == (
                1
                * int(math.ceil(6.0 / 6.0))
                * int(math.ceil(21.0 / 4.0))
                * int(math.ceil(51.0 / 4.0))
            )
            assert len(fieldset.W.grid._load_chunk) == (
                1
                * int(math.ceil(7.0 / 7.0))
                * int(math.ceil(21.0 / 4.0))
                * int(math.ceil(51.0 / 4.0))
            )
412
413
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize("chunk_mode", [False, "auto", "specific"])
def test_ofam_3D(mode, chunk_mode):
    """Check dask chunking of an OFAM fieldset and a 10-day RK4 advection.

    Besides the chunk-count checks, the advected particle end position is
    verified against (~173E, ~11N).
    """
    if chunk_mode in [
        "auto",
    ]:
        # Small chunk budget so that "auto" actually yields multiple chunks.
        dask.config.set({"array.chunk-size": "1024KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})

    data_folder = parcels.download_example_dataset("OFAM_example_data")
    filenames = {
        "U": f"{data_folder}/OFAM_simple_U.nc",
        "V": f"{data_folder}/OFAM_simple_V.nc",
    }
    variables = {"U": "u", "V": "v"}
    dimensions = {
        "lat": "yu_ocean",
        "lon": "xu_ocean",
        "depth": "st_ocean",
        "time": "Time",
    }

    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific":
        chs = {
            "lon": ("xu_ocean", 100),
            "lat": ("yu_ocean", 50),
            "depth": ("st_edges_ocean", 60),
            "time": ("Time", 1),
        }
    fieldset = parcels.FieldSet.from_netcdf(
        filenames, variables, dimensions, allow_time_extrapolation=True, chunksize=chs
    )

    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=180, lat=10, depth=2.5)
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=10), dt=timedelta(minutes=5)
    )
    # OFAM sample file dimensions: time=UNLIMITED, st_ocean=1, st_edges_ocean=52, lat=601, lon=2001
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    if chunk_mode is False:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode == "auto":
        assert len(fieldset.U.grid._load_chunk) != 1
    elif chunk_mode == "specific":
        # chunk_info layout: [ndims, nblocks_per_dim..., blocksizes...];
        # sum the per-dimension block sizes to recover the full extents.
        numblocks = [i for i in fieldset.U.grid.chunk_info[1:3]]
        dblocks = 1
        vblocks = 0
        for bsize in fieldset.U.grid.chunk_info[3 : 3 + numblocks[0]]:
            vblocks += bsize
        ublocks = 0
        for bsize in fieldset.U.grid.chunk_info[
            3 + numblocks[0] : 3 + numblocks[0] + numblocks[1]
        ]:
            ublocks += bsize
        matching_numblocks = ublocks == 2001 and vblocks == 601 and dblocks == 1
        matching_fields = fieldset.U.grid.chunk_info == fieldset.V.grid.chunk_info
        matching_uniformblocks = len(fieldset.U.grid._load_chunk) == (
            1
            * int(math.ceil(1.0 / 60.0))
            * int(math.ceil(601.0 / 50.0))
            * int(math.ceil(2001.0 / 100.0))
        )
        # Accept either uniform chunking or consistent per-field chunk layouts.
        assert matching_uniformblocks or (matching_fields and matching_numblocks)
    assert abs(pset[0].lon - 173) < 1
    assert abs(pset[0].lat - 11) < 1
483
484
@pytest.mark.parametrize("mode", ["jit"])
@pytest.mark.parametrize(
    "chunk_mode",
    [
        False,
        pytest.param(
            "auto",
            marks=pytest.mark.xfail(
                reason="Dask v2024.11.0 caused auto chunking to fail. See #1762"
            ),
        ),
        "specific_same",
        "specific_different",
    ],
)
@pytest.mark.parametrize("using_add_field", [False, True])
def test_mitgcm(mode, chunk_mode, using_add_field):
    """Check dask chunking of a MITgcm fieldset.

    "specific_same" uses identical chunks for U and V (one shared grid);
    "specific_different" uses different chunks (two grids). The
    ``using_add_field`` variant builds U and V as separate fieldsets and
    combines them via ``add_field``.
    """
    if chunk_mode in [
        "auto",
    ]:
        # Small chunk budget so that "auto" actually yields multiple chunks.
        dask.config.set({"array.chunk-size": "512KiB"})
    else:
        dask.config.set({"array.chunk-size": "128MiB"})
    data_folder = parcels.download_example_dataset("MITgcm_example_data")
    filenames = {
        "U": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
        "V": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
    }
    variables = {"U": "UVEL", "V": "VVEL"}
    dimensions = {
        "U": {"lon": "XG", "lat": "YG", "time": "time"},
        "V": {"lon": "XG", "lat": "YG", "time": "time"},
    }

    chs = False
    if chunk_mode == "auto":
        chs = "auto"
    elif chunk_mode == "specific_same":
        chs = {
            "U": {"lat": ("YC", 50), "lon": ("XG", 100)},
            "V": {"lat": ("YG", 50), "lon": ("XC", 100)},
        }
    elif chunk_mode == "specific_different":
        chs = {
            "U": {"lat": ("YC", 50), "lon": ("XG", 100)},
            "V": {"lat": ("YG", 40), "lon": ("XC", 100)},
        }
    if using_add_field:
        if chs in [False, "auto"]:
            # Per-field chunk settings are needed when building U and V
            # from separate FieldSet objects below.
            chs = {"U": chs, "V": chs}
        fieldset = parcels.FieldSet.from_mitgcm(
            filenames["U"],
            {"U": variables["U"]},
            dimensions["U"],
            mesh="flat",
            chunksize=chs["U"],
        )
        fieldset2 = parcels.FieldSet.from_mitgcm(
            filenames["V"],
            {"V": variables["V"]},
            dimensions["V"],
            mesh="flat",
            chunksize=chs["V"],
        )
        fieldset.add_field(fieldset2.V)
    else:
        fieldset = parcels.FieldSet.from_mitgcm(
            filenames, variables, dimensions, mesh="flat", chunksize=chs
        )

    pset = parcels.ParticleSet.from_list(
        fieldset=fieldset, pclass=ptype[mode], lon=5e5, lat=5e5
    )
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
    )
    # MITgcm sample file dimensions: time=10, XG=400, YG=200
    if chunk_mode != "specific_different":
        assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
    if chunk_mode in [
        False,
    ]:
        assert len(fieldset.U.grid._load_chunk) == 1
    elif chunk_mode in [
        "auto",
    ]:
        assert len(fieldset.U.grid._load_chunk) != 1
    elif "specific" in chunk_mode:
        # Expected chunk count = product of ceil(dim / chunk) per dimension.
        assert len(fieldset.U.grid._load_chunk) == (
            1 * int(math.ceil(400.0 / 50.0)) * int(math.ceil(200.0 / 100.0))
        )
    if chunk_mode == "specific_same":
        assert fieldset.gridset.size == 1
    elif chunk_mode == "specific_different":
        assert fieldset.gridset.size == 2
    assert np.allclose(pset[0].lon, 5.27e5, atol=1e3)
581
582
@pytest.mark.parametrize("mode", ["jit"])
def test_diff_entry_dimensions_chunks(mode):
    """Chunk depth for 2D NEMO fields (which have no depth dimension entry).

    The chunksize dict requests a depth chunk that the dimensions dict does
    not declare; U and V should still end up with matching chunk counts.
    """
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "data": vfiles},
    }
    variables = {"U": "uo", "V": "vo"}
    dimensions = {
        "U": {"lon": "glamf", "lat": "gphif", "time": "time_counter"},
        "V": {"lon": "glamf", "lat": "gphif", "time": "time_counter"},
    }
    chs = {
        "U": {"depth": ("depthu", 75), "lat": ("y", 16), "lon": ("x", 16)},
        "V": {"depth": ("depthv", 75), "lat": ("y", 16), "lon": ("x", 16)},
    }
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chs
    )
    compute_nemo_particle_advection(fieldset, mode)
    # Nemo sample file dimensions: depthu=75, y=201, x=151
    assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk)
609
610
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_3d_2dfield_sampling(mode):
    """Sample a curvilinear 2D field and a rectilinear 2D field during advection.

    Both extra fields are constant 1, so after ``runtime/dt`` kernel calls the
    accumulated sample variables must equal ``runtime/dt``.
    """
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "data": vfiles},
        "nav_lon": {
            "lon": mesh_mask,
            "lat": mesh_mask,
            "data": [
                ufiles[0],
            ],
        },
    }
    variables = {"U": "uo", "V": "vo", "nav_lon": "nav_lon"}
    dimensions = {
        "U": {"lon": "glamf", "lat": "gphif", "time": "time_counter"},
        "V": {"lon": "glamf", "lat": "gphif", "time": "time_counter"},
        "nav_lon": {"lon": "glamf", "lat": "gphif"},
    }
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=False
    )
    # Overwrite nav_lon with ones so the sampled value per call is exactly 1.
    fieldset.nav_lon.data = np.ones(fieldset.nav_lon.data.shape, dtype=np.float32)
    # Add a rectilinear 2D field of ones covering the advection region.
    fieldset.add_field(
        parcels.Field(
            "rectilinear_2D",
            np.ones((2, 2)),
            lon=np.array([-10, 20]),
            lat=np.array([40, 80]),
            chunksize=False,
        )
    )

    class MyParticle(ptype[mode]):
        # Accumulators for the values sampled from each 2D field.
        sample_var_curvilinear = parcels.Variable("sample_var_curvilinear")
        sample_var_rectilinear = parcels.Variable("sample_var_rectilinear")

    pset = parcels.ParticleSet(fieldset, pclass=MyParticle, lon=2.5, lat=52)

    def Sample2D(particle, fieldset, time):  # pragma: no cover
        particle.sample_var_curvilinear += fieldset.nav_lon[
            time, particle.depth, particle.lat, particle.lon
        ]
        particle.sample_var_rectilinear += fieldset.rectilinear_2D[
            time, particle.depth, particle.lat, particle.lon
        ]

    runtime, dt = 86400 * 4, 6 * 3600
    pset.execute(pset.Kernel(parcels.AdvectionRK4) + Sample2D, runtime=runtime, dt=dt)

    # Each kernel call adds exactly 1 to each accumulator.
    assert pset.sample_var_rectilinear == runtime / dt
    assert pset.sample_var_curvilinear == runtime / dt
668
669
@pytest.mark.parametrize("mode", ["jit"])
def test_diff_entry_chunksize_error_nemo_complex_conform_depth(mode):
    """Request mismatched chunk sizes for U/V/W and check they are not honoured.

    The actual per-field chunk counts (``nchunks``) must differ from what the
    requested chunksizes would imply, because parcels falls back to a
    pre-defined minimal chunk (or raises) when the requests don't match.

    Fix: ``npart_W`` previously read ``fieldset.V.nchunks`` (copy-paste
    error); it now reads ``fieldset.W.nchunks``.
    """
    # ==== this test is expected to fall-back to a pre-defined minimal chunk as ==== #
    # ==== the requested chunks don't match, or throw a value error. ==== #
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    wfiles = sorted(glob(f"{data_folder}/ORCA*W.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    filenames = {
        "U": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": ufiles},
        "V": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": vfiles},
        "W": {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": wfiles},
    }
    variables = {"U": "uo", "V": "vo", "W": "wo"}
    dimensions = {
        "U": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "V": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
        "W": {
            "lon": "glamf",
            "lat": "gphif",
            "depth": "depthw",
            "time": "time_counter",
        },
    }
    # Deliberately different lat/lon chunks per field.
    chs = {
        "U": {"depth": ("depthu", 75), "lat": ("y", 16), "lon": ("x", 16)},
        "V": {"depth": ("depthv", 75), "lat": ("y", 4), "lon": ("x", 16)},
        "W": {"depth": ("depthw", 75), "lat": ("y", 16), "lon": ("x", 4)},
    }
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chs
    )
    compute_nemo_particle_advection(fieldset, mode)
    # Nemo sample file dimensions: depthu=75, y=201, x=151
    # Actual per-dimension chunk counts (skipping the leading time entry).
    npart_U = 1
    npart_U = [npart_U * k for k in fieldset.U.nchunks[1:]]
    npart_V = 1
    npart_V = [npart_V * k for k in fieldset.V.nchunks[1:]]
    npart_W = 1
    npart_W = [npart_W * k for k in fieldset.W.nchunks[1:]]
    # Chunk counts the requested chunksizes would imply for each field.
    chn = {
        "U": {
            "lat": int(math.ceil(201.0 / chs["U"]["lat"][1])),
            "lon": int(math.ceil(151.0 / chs["U"]["lon"][1])),
            "depth": int(math.ceil(75.0 / chs["U"]["depth"][1])),
        },
        "V": {
            "lat": int(math.ceil(201.0 / chs["V"]["lat"][1])),
            "lon": int(math.ceil(151.0 / chs["V"]["lon"][1])),
            "depth": int(math.ceil(75.0 / chs["V"]["depth"][1])),
        },
        "W": {
            "lat": int(math.ceil(201.0 / chs["W"]["lat"][1])),
            "lon": int(math.ceil(151.0 / chs["W"]["lon"][1])),
            "depth": int(math.ceil(75.0 / chs["W"]["depth"][1])),
        },
    }
    npart_U_request = 1
    npart_U_request = [npart_U_request * chn["U"][k] for k in chn["U"]]
    npart_V_request = 1
    npart_V_request = [npart_V_request * chn["V"][k] for k in chn["V"]]
    npart_W_request = 1
    npart_W_request = [npart_W_request * chn["W"][k] for k in chn["W"]]
    # The requested chunking must NOT have been applied as-is.
    assert npart_U != npart_U_request
    assert npart_V != npart_V_request
    assert npart_W != npart_W_request
748
749
@pytest.mark.parametrize("mode", ["jit"])
def test_diff_entry_chunksize_correction_globcurrent(mode):
    """Request different lon chunks for U and V on GlobCurrent data.

    V's requested chunking should be honoured (and differ from U's actual
    chunk counts).
    """
    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    filenames = str(
        data_folder / "200201*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc"
    )
    variables = {
        "U": "eastward_eulerian_current_velocity",
        "V": "northward_eulerian_current_velocity",
    }
    dimensions = {"lat": "lat", "lon": "lon", "time": "time"}
    # Same lat chunks, different lon chunks for U and V.
    chs = {
        "U": {"lat": ("lat", 16), "lon": ("lon", 16)},
        "V": {"lat": ("lat", 16), "lon": ("lon", 4)},
    }
    fieldset = parcels.FieldSet.from_netcdf(
        filenames, variables, dimensions, chunksize=chs
    )
    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
    )
    # GlobCurrent sample file dimensions: time=UNLIMITED, lat=41, lon=81
    # Actual per-dimension chunk counts (skipping the leading time entry).
    npart_U = 1
    npart_U = [npart_U * k for k in fieldset.U.nchunks[1:]]
    npart_V = 1
    npart_V = [npart_V * k for k in fieldset.V.nchunks[1:]]
    npart_V_request = 1
    # Chunk counts the requested chunksizes imply.
    chn = {
        "U": {
            "lat": int(math.ceil(41.0 / chs["U"]["lat"][1])),
            "lon": int(math.ceil(81.0 / chs["U"]["lon"][1])),
        },
        "V": {
            "lat": int(math.ceil(41.0 / chs["V"]["lat"][1])),
            "lon": int(math.ceil(81.0 / chs["V"]["lon"][1])),
        },
    }
    npart_V_request = [npart_V_request * chn["V"][k] for k in chn["V"]]
    assert npart_V_request != npart_U
    assert npart_V_request == npart_V
example_decaying_moving_eddy.py#
1from datetime import timedelta
2
3import numpy as np
4import pytest
5
6import parcels
7
# Map the execution-mode name used in test parametrization ("scipy"/"jit")
# to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}

# Define some constants.
u_g = 0.04  # Geostrophic current
u_0 = 0.3  # Initial speed in x direction. v_0 = 0
gamma = (
    1.0 / timedelta(days=2.89).total_seconds()
)  # Dissipative effects due to viscosity.
gamma_g = 1.0 / timedelta(days=28.9).total_seconds()  # Decay rate of the geostrophic current.
f = 1.0e-4  # Coriolis parameter.
start_lon = [10000.0]  # Define the start longitude and latitude for the particle.
start_lat = [10000.0]
20
21
def decaying_moving_eddy_fieldset(
    xdim=2, ydim=2
):  # Define 2D flat, square fieldset for testing purposes.
    """Simulate an ocean that accelerates subject to Coriolis force
    and dissipative effects, upon which a geostrophic current is
    superimposed.

    The original test description can be found in: N. Fabbroni, 2009,
    Numerical Simulation of Passive tracers dispersion in the sea,
    Ph.D. dissertation, University of Bologna
    http://amsdottorato.unibo.it/1733/1/Fabbroni_Nicoletta_Tesi.pdf
    """
    # Grid: a 2-day record at 5-minute intervals over a small flat box.
    depth = np.zeros(1, dtype=np.float32)
    time = np.arange(0.0, 2.0 * 86400.0 + 1e-5, 60.0 * 5.0, dtype=np.float64)
    lon = np.linspace(0, 20000, xdim, dtype=np.float32)
    lat = np.linspace(5000, 12000, ydim, dtype=np.float32)

    shape = (time.size, lat.size, lon.size)
    U = np.zeros(shape, dtype=np.float32)
    V = np.zeros(shape, dtype=np.float32)

    # Spatially uniform, time-decaying inertial oscillation on top of a
    # slowly decaying geostrophic current.
    for idx, tval in enumerate(time):
        inertial = (u_0 - u_g) * np.exp(-gamma * tval)
        U[idx, :, :] = u_g * np.exp(-gamma_g * tval) + inertial * np.cos(f * tval)
        V[idx, :, :] = -inertial * np.sin(f * tval)

    return parcels.FieldSet.from_data(
        {"U": U, "V": V},
        {"lon": lon, "lat": lat, "depth": depth, "time": time},
        mesh="flat",
    )
51
52
def true_values(
    t, x_0, y_0
):  # Calculate the expected values for particles at the endtime, given their start location.
    """Analytic particle position at time ``t`` for a start point (x_0, y_0).

    Returns ``np.array([x, y])`` following the Fabbroni (2009) solution for
    a decaying inertial oscillation superimposed on a decaying geostrophic
    current.
    """
    denom = f**2 + gamma**2
    damping = np.exp(-gamma * t)
    ratio = gamma / f

    x = (
        x_0
        + (u_g / gamma_g) * (1 - np.exp(-gamma_g * t))
        + f
        * ((u_0 - u_g) / denom)
        * (ratio + damping * (np.sin(f * t) - ratio * np.cos(f * t)))
    )
    y = y_0 - ((u_0 - u_g) / denom) * f * (
        1 - damping * (np.cos(f * t) + ratio * np.sin(f * t))
    )

    return np.array([x, y])
71
72
def decaying_moving_example(
    fieldset, outfile, mode="scipy", method=parcels.AdvectionRK4
):
    """Advect one particle for 2 days in the decaying-eddy fieldset.

    Output is written hourly to ``outfile``; the ParticleSet is returned.
    """
    pset = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=start_lon, lat=start_lat
    )

    output_file = pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1))
    pset.execute(
        method,
        runtime=timedelta(days=2),
        dt=timedelta(minutes=5),
        output_file=output_file,
    )

    return pset
92
93
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_rotation_example(mode, tmpdir):
    """The advected particle must match the analytic solution within 1e-2."""
    outfile = tmpdir.join("DecayingMovingParticle.zarr")
    fieldset = decaying_moving_eddy_fieldset()
    pset = decaying_moving_example(fieldset, outfile, mode=mode)
    # Analytic end position for the particle's start location and end time.
    expected = true_values(pset[0].time, start_lon, start_lat)
    actual = np.array([[pset[0].lon], [pset[0].lat]])
    assert np.allclose(actual, expected, 1e-2)
105
106
def main():
    """Write the decaying-moving-eddy fieldset to disk and run the example."""
    fieldset = decaying_moving_eddy_fieldset()
    fieldset.write("decaying_moving_eddy")
    decaying_moving_example(fieldset, "DecayingMovingParticle.zarr")
114
115
# Allow running this example directly as a script.
if __name__ == "__main__":
    main()
example_globcurrent.py#
1from datetime import timedelta
2from glob import glob
3
4import numpy as np
5import pytest
6import xarray as xr
7
8import parcels
9
10ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
11
12
def set_globcurrent_fieldset(
    filename=None,
    indices=None,
    deferred_load=True,
    use_xarray=False,
    time_periodic=False,
    timestamps=None,
):
    """Build a FieldSet from the GlobCurrent example dataset.

    Parameters
    ----------
    filename :
        File path or glob pattern to read; defaults to the 2002 files of the
        downloaded example dataset.
    indices :
        Per-dimension index subsets, forwarded to ``FieldSet.from_netcdf``.
    deferred_load : bool
        Whether field data are loaded lazily (netCDF path only).
    use_xarray : bool
        Load via ``xr.open_mfdataset`` instead of ``FieldSet.from_netcdf``.
    time_periodic :
        Period with which to wrap the fields in time (False disables wrapping).
    timestamps :
        Explicit per-file timestamps; when given, no 'time' dimension name is
        passed so the timestamps are used instead.
    """
    if filename is None:
        data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
        filename = str(
            data_folder / "2002*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc"
        )
    variables = {
        "U": "eastward_eulerian_current_velocity",
        "V": "northward_eulerian_current_velocity",
    }
    if timestamps is None:
        dimensions = {"lat": "lat", "lon": "lon", "time": "time"}
    else:
        # Time comes from the explicit timestamps rather than a file variable.
        dimensions = {"lat": "lat", "lon": "lon"}
    if use_xarray:
        ds = xr.open_mfdataset(filename, combine="by_coords")
        return parcels.FieldSet.from_xarray_dataset(
            ds, variables, dimensions, time_periodic=time_periodic
        )
    else:
        return parcels.FieldSet.from_netcdf(
            filename,
            variables,
            dimensions,
            indices,
            deferred_load=deferred_load,
            time_periodic=time_periodic,
            timestamps=timestamps,
        )
49
50
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_fieldset(use_xarray):
    """Check grid sizes and that index-based subsetting selects the right coordinates."""
    fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)
    for field in (fieldset.U, fieldset.V):
        assert field.lon.size == 81
        assert field.lat.size == 41

    if not use_xarray:
        # Index subsetting is only supported by the netCDF loading path.
        indices = {"lon": [5], "lat": range(20, 30)}
        sub = set_globcurrent_fieldset(indices=indices, use_xarray=use_xarray)
        for subfield, field in ((sub.U, fieldset.U), (sub.V, fieldset.V)):
            assert np.allclose(subfield.lon, field.lon[indices["lon"]])
            assert np.allclose(subfield.lat, field.lat[indices["lat"]])
66
67
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize(
    "dt, lonstart, latstart", [(3600.0, 25, -35), (-3600.0, 20, -39)]
)
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_fieldset_advancetime(mode, dt, lonstart, latstart, use_xarray):
    """Deferred and eagerly loaded fieldsets must yield the same trajectory."""
    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    basepath = str(data_folder / "20*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc")
    files = sorted(glob(str(basepath)))

    # Deferred (lazy) loading of the first ten files.
    fieldsetsub = set_globcurrent_fieldset(files[0:10], use_xarray=use_xarray)
    psetsub = parcels.ParticleSet.from_list(
        fieldset=fieldsetsub, pclass=ptype[mode], lon=[lonstart], lat=[latstart]
    )

    # Same files loaded eagerly, as the reference.
    fieldsetall = set_globcurrent_fieldset(
        files[0:10], deferred_load=False, use_xarray=use_xarray
    )
    psetall = parcels.ParticleSet.from_list(
        fieldset=fieldsetall, pclass=ptype[mode], lon=[lonstart], lat=[latstart]
    )
    if dt < 0:
        # Backward runs start at the end of the available time range.
        psetsub[0].time_nextloop = fieldsetsub.U.grid.time[-1]
        psetall[0].time_nextloop = fieldsetall.U.grid.time[-1]

    psetsub.execute(parcels.AdvectionRK4, runtime=timedelta(days=7), dt=dt)
    psetall.execute(parcels.AdvectionRK4, runtime=timedelta(days=7), dt=dt)

    assert abs(psetsub[0].lon - psetall[0].lon) < 1e-4
97
98
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_particles(mode, use_xarray):
    """Advect one particle for a day and check its approximate end position."""
    fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)

    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=[25], lat=[-35])
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
    )

    # Expected end position (within 1 degree) from a reference run.
    assert abs(pset[0].lon - 23.8) < 1
    assert abs(pset[0].lat - -35.3) < 1
115
116
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("rundays", [300, 900])
def test_globcurrent_time_periodic(mode, rundays):
    """Time-periodic sampling must agree between deferred and eager loading."""
    sample_var = []
    for deferred_load in [True, False]:
        # Wrap the dataset periodically in time with a one-year period.
        fieldset = set_globcurrent_fieldset(
            time_periodic=timedelta(days=365), deferred_load=deferred_load
        )

        MyParticle = ptype[mode].add_variable("sample_var", initial=0.0)

        pset = parcels.ParticleSet(
            fieldset, pclass=MyParticle, lon=25, lat=-35, time=fieldset.U.grid.time[0]
        )

        def SampleU(particle, fieldset, time):  # pragma: no cover
            # Accumulate the sampled zonal velocity at the particle position.
            u, v = fieldset.UV[time, particle.depth, particle.lat, particle.lon]
            particle.sample_var += u

        pset.execute(SampleU, runtime=timedelta(days=rundays), dt=timedelta(days=1))
        sample_var.append(pset[0].sample_var)

    # Both loading strategies must accumulate the same total.
    assert np.allclose(sample_var[0], sample_var[1])
140
141
@pytest.mark.parametrize("dt", [-300, 300])
def test_globcurrent_xarray_vs_netcdf(dt):
    """xarray and netCDF loading paths must give identical trajectories."""
    lonstart, latstart, runtime = 25, -35, timedelta(days=7)

    endpoints = []
    for use_xarray in (False, True):
        fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)
        pset = parcels.ParticleSet(
            fieldset, pclass=parcels.JITParticle, lon=lonstart, lat=latstart
        )
        pset.execute(parcels.AdvectionRK4, runtime=runtime, dt=dt)
        endpoints.append((pset[0].lon, pset[0].lat))

    (lon_n, lat_n), (lon_x, lat_x) = endpoints
    assert np.allclose(lon_n, lon_x)
    assert np.allclose(lat_n, lat_x)
160
161
@pytest.mark.parametrize("dt", [-300, 300])
def test_globcurrent_netcdf_timestamps(dt):
    """Passing explicit timestamps must reproduce the time-from-file trajectory."""
    fieldset_default = set_globcurrent_fieldset()
    fieldset_stamped = set_globcurrent_fieldset(
        timestamps=fieldset_default.U.grid.timeslices
    )
    lonstart, latstart, runtime = 25, -35, timedelta(days=7)

    results = []
    for fieldset in (fieldset_default, fieldset_stamped):
        pset = parcels.ParticleSet(
            fieldset, pclass=parcels.JITParticle, lon=lonstart, lat=latstart
        )
        pset.execute(parcels.AdvectionRK4, runtime=runtime, dt=dt)
        results.append((pset.lon[0], pset.lat[0]))

    assert np.allclose(results[0][0], results[1][0])
    assert np.allclose(results[0][1], results[1][1])
181
182
def test__particles_init_time():
    """All supported ways of specifying a particle release time must agree."""
    fieldset = set_globcurrent_fieldset()

    lonstart = [25]
    latstart = [-35]

    # Equivalent representations of the same release time: a datetime64
    # scalar, seconds since the data origin, and array/list wrappers.
    time_specs = [
        np.datetime64("2002-01-15"),
        14 * 86400,
        np.array([np.datetime64("2002-01-15")]),
        [np.datetime64("2002-01-15")],
    ]
    psets = [
        parcels.ParticleSet(
            fieldset,
            pclass=parcels.JITParticle,
            lon=lonstart,
            lat=latstart,
            time=spec,
        )
        for spec in time_specs
    ]

    reference = psets[0][0].time
    for pset in psets[1:]:
        assert reference - pset[0].time == 0
221
222
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("use_xarray", [True, False])
def test_globcurrent_time_extrapolation_error(mode, use_xarray):
    """Releasing a day before the first field snapshot must raise on execution."""
    fieldset = set_globcurrent_fieldset(use_xarray=use_xarray)
    release_time = fieldset.U.grid.time[0] - timedelta(days=1).total_seconds()
    pset = parcels.ParticleSet(
        fieldset,
        pclass=ptype[mode],
        lon=[25],
        lat=[-35],
        time=release_time,
    )
    with pytest.raises(parcels.TimeExtrapolationError):
        pset.execute(
            parcels.AdvectionRK4, runtime=timedelta(days=1), dt=timedelta(minutes=5)
        )
238
239
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("dt", [-300, 300])
@pytest.mark.parametrize("with_starttime", [True, False])
def test_globcurrent_startparticles_between_time_arrays(mode, dt, with_starttime):
    """Sampling a field whose time axis covers only part of the run.

    When the particle is released at the very start (forward) or end
    (backward) of the U/V time range, sampling the February-only P field
    must raise TimeExtrapolationError; otherwise the kernel runs normally.
    """
    fieldset = set_globcurrent_fieldset()

    data_folder = parcels.download_example_dataset("GlobCurrent_example_data")
    fnamesFeb = sorted(glob(f"{data_folder}/200202*.nc"))
    fieldset.add_field(
        parcels.Field.from_netcdf(
            fnamesFeb,
            ("P", "eastward_eulerian_current_velocity"),
            {"lat": "lat", "lon": "lon", "time": "time"},
        )
    )

    MyParticle = ptype[mode].add_variable("sample_var", initial=0.0)

    def SampleP(particle, fieldset, time):  # pragma: no cover
        particle.sample_var += fieldset.P[
            time, particle.depth, particle.lat, particle.lon
        ]

    if with_starttime:
        release = fieldset.U.grid.time[0] if dt > 0 else fieldset.U.grid.time[-1]
        pset = parcels.ParticleSet(
            fieldset, pclass=MyParticle, lon=[25], lat=[-35], time=release
        )
    else:
        pset = parcels.ParticleSet(fieldset, pclass=MyParticle, lon=[25], lat=[-35])

    kernel = pset.Kernel(parcels.AdvectionRK4) + SampleP
    if with_starttime:
        with pytest.raises(parcels.TimeExtrapolationError):
            pset.execute(kernel, runtime=timedelta(days=1), dt=dt)
    else:
        pset.execute(kernel, runtime=timedelta(days=1), dt=dt)
284
285
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_globcurrent_particle_independence(mode, rundays=5):
    """Deleting one particle mid-run must not affect another's trajectory."""
    fieldset = set_globcurrent_fieldset()
    time0 = fieldset.U.grid.time[0]

    def DeleteP0(particle, fieldset, time):  # pragma: no cover
        # Remove the first particle (id 0) from the simulation.
        if particle.id == 0:
            particle.delete()

    # Two identical particles; the first is deleted during execution.
    pset0 = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=[25, 25], lat=[-35, -35], time=time0
    )

    pset0.execute(
        pset0.Kernel(DeleteP0) + parcels.AdvectionRK4,
        runtime=timedelta(days=rundays),
        dt=timedelta(minutes=5),
    )

    # Reference run without any deletion.
    pset1 = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=[25, 25], lat=[-35, -35], time=time0
    )

    pset1.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=rundays), dt=timedelta(minutes=5)
    )

    # The surviving particle must match the corresponding reference particle.
    assert np.allclose([pset0[-1].lon, pset0[-1].lat], [pset1[-1].lon, pset1[-1].lat])
314
315
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("dt", [-300, 300])
@pytest.mark.parametrize("pid_offset", [0, 20])
def test_globcurrent_pset_fromfile(mode, dt, pid_offset, tmpdir):
    """A ParticleSet restarted from a particle file must continue identically."""
    filename = tmpdir.join("pset_fromparticlefile.zarr")
    fieldset = set_globcurrent_fieldset()

    # Offset the particle-id counter so id preservation is also exercised.
    ptype[mode].setLastID(pid_offset)
    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
    pfile = pset.ParticleFile(filename, outputdt=timedelta(hours=6))
    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=1), dt=dt, output_file=pfile
    )
    # Flush the final positions so the restart sees the latest state.
    pfile.write_latest_locations(pset, max(pset.time_nextloop))

    # Restart from the last (forward run) or first (backward run) written time.
    restarttime = np.nanmax if dt > 0 else np.nanmin
    pset_new = parcels.ParticleSet.from_particlefile(
        fieldset, pclass=ptype[mode], filename=filename, restarttime=restarttime
    )
    pset.execute(parcels.AdvectionRK4, runtime=timedelta(days=1), dt=dt)
    pset_new.execute(parcels.AdvectionRK4, runtime=timedelta(days=1), dt=dt)

    # Continued original and restarted sets must agree in every variable.
    for var in ["lon", "lat", "depth", "time", "id"]:
        assert np.allclose(
            [getattr(p, var) for p in pset], [getattr(p, var) for p in pset_new]
        )
342
343
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_error_outputdt_not_multiple_dt(mode, tmpdir):
    """Executing with an outputdt that is not a multiple of dt must raise."""
    fieldset = set_globcurrent_fieldset()

    pset = parcels.ParticleSet(fieldset, pclass=ptype[mode], lon=[0], lat=[0])
    ofile = pset.ParticleFile(
        name=tmpdir.join("pfile_error_outputdt_not_multiple_dt.zarr"),
        outputdt=timedelta(days=1),
    )

    def DoNothing(particle, fieldset, time):  # pragma: no cover
        pass

    dt = 81.2584344538292  # number for which output writing fails

    with pytest.raises(ValueError):
        pset.execute(DoNothing, runtime=timedelta(days=10), dt=dt, output_file=ofile)
example_mitgcm.py#
1from datetime import timedelta
2from pathlib import Path
3from typing import Literal
4
5import numpy as np
6import xarray as xr
7
8import parcels
9
10ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
11
12
def run_mitgcm_zonally_reentrant(mode: Literal["scipy", "jit"], path: Path):
    """Function that shows how to load MITgcm data in a zonally periodic domain.

    Advects a line of 10 particles for 5 days and writes daily output to
    ``path``.
    """
    data_folder = parcels.download_example_dataset("MITgcm_example_data")
    filenames = {
        "U": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
        "V": f"{data_folder}/mitgcm_UV_surface_zonally_reentrant.nc",
    }
    variables = {"U": "UVEL", "V": "VVEL"}
    # Both velocity components are read on the XG/YG grid coordinates.
    dimensions = {
        "U": {"lon": "XG", "lat": "YG", "time": "time"},
        "V": {"lon": "XG", "lat": "YG", "time": "time"},
    }
    fieldset = parcels.FieldSet.from_mitgcm(
        filenames, variables, dimensions, mesh="flat"
    )

    # Halo cells let interpolation work across the zonal boundary.
    fieldset.add_periodic_halo(zonal=True)
    fieldset.add_constant("domain_width", 1000000)

    def periodicBC(particle, fieldset, time):  # pragma: no cover
        # Wrap particles that leave the domain back to the other side.
        if particle.lon < 0:
            particle_dlon += fieldset.domain_width  # noqa
        elif particle.lon > fieldset.domain_width:
            particle_dlon -= fieldset.domain_width

    # Release particles 5 cells away from the Eastern boundary
    pset = parcels.ParticleSet.from_line(
        fieldset,
        pclass=ptype[mode],
        start=(fieldset.U.grid.lon[-5], fieldset.U.grid.lat[5]),
        finish=(fieldset.U.grid.lon[-5], fieldset.U.grid.lat[-5]),
        size=10,
    )
    pfile = parcels.ParticleFile(
        str(path),
        pset,
        outputdt=timedelta(days=1),
        chunks=(len(pset), 1),
    )
    kernels = parcels.AdvectionRK4 + pset.Kernel(periodicBC)
    pset.execute(
        kernels, runtime=timedelta(days=5), dt=timedelta(minutes=30), output_file=pfile
    )
56
57
def test_mitgcm_output_compare(tmpdir):
    """JIT and scipy runs of the MITgcm example must produce identical tracks."""
    paths = {mode: tmpdir / f"MIT_particles_{mode}.zarr" for mode in ("scipy", "jit")}
    for mode, path in paths.items():
        run_mitgcm_zonally_reentrant(mode, path)

    ds_jit = xr.open_zarr(paths["jit"])
    ds_scipy = xr.open_zarr(paths["scipy"])

    np.testing.assert_allclose(ds_jit.lat.data, ds_scipy.lat.data)
    np.testing.assert_allclose(ds_jit.lon.data, ds_scipy.lon.data)
example_moving_eddies.py#
1import gc
2import math
3from argparse import ArgumentParser
4from datetime import timedelta
5
6import numpy as np
7import pytest
8
9import parcels
10
# Map execution-mode name to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map advection-scheme name to the corresponding Parcels kernel.
method = {
    "RK4": parcels.AdvectionRK4,
    "EE": parcels.AdvectionEE,
    "RK45": parcels.AdvectionRK45,
}
17
18
def moving_eddies_fieldset(xdim=200, ydim=350, mesh="flat"):
    """Generate a fieldset encapsulating the flow field consisting of two
    moving eddies, one moving westward and the other moving northwestward.

    Parameters
    ----------
    xdim : int
        Horizontal dimension of the generated fieldset (Default value = 200)
    ydim : int
        Vertical dimension of the generated fieldset (Default value = 350)
    mesh : str
        String indicating the type of mesh coordinates and
        units used during velocity interpolation:

        1. spherical: Lat and lon in degree, with a
           correction for zonal velocity U near the poles.
        2. flat (default): No conversion, lat/lon are assumed to be in m.

    Notes
    -----
    Note that this is not a proper geophysical flow. Rather, a Gaussian eddy is moved
    artificially with uniform velocities. Velocities are calculated from geostrophy.

    """
    # Set Parcels FieldSet variables
    time = np.arange(0.0, 8.0 * 86400.0, 86400.0, dtype=np.float64)

    # Coordinates of the test fieldset (on A-grid in m)
    if mesh == "spherical":
        lon = np.linspace(0, 4, xdim, dtype=np.float32)
        lat = np.linspace(45, 52, ydim, dtype=np.float32)
    else:
        lon = np.linspace(0, 4.0e5, xdim, dtype=np.float32)
        lat = np.linspace(0, 7.0e5, ydim, dtype=np.float32)

    # Grid spacing in m
    def cosd(x):
        return math.cos(math.radians(float(x)))

    dx = (
        (lon[1] - lon[0]) * 1852 * 60 * cosd(lat.mean())
        if mesh == "spherical"
        else lon[1] - lon[0]
    )
    dy = (lat[1] - lat[0]) * 1852 * 60 if mesh == "spherical" else lat[1] - lat[0]

    # Define arrays U (zonal), V (meridional), and P (sea surface height) on A-grid
    U = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)
    V = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)
    P = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)

    # Some constants
    corio_0 = 1.0e-4  # Coriolis parameter
    h0 = 1  # Max eddy height
    sig = 0.5  # Eddy e-folding decay scale (in degrees)
    g = 10  # Gravitational constant
    eddyspeed = 0.1  # Translational speed in m/s
    dX = eddyspeed * 86400 / dx  # Grid cell movement of eddy max each day
    dY = eddyspeed * 86400 / dy  # Grid cell movement of eddy max each day

    [x, y] = np.mgrid[: lon.size, : lat.size]
    for t in range(time.size):
        # Eddy 1 translates westward; eddy 2 translates northwestward.
        hymax_1 = lat.size / 7.0
        hxmax_1 = 0.75 * lon.size - dX * t
        hymax_2 = 3.0 * lat.size / 7.0 + dY * t
        hxmax_2 = 0.75 * lon.size - dX * t

        P[:, :, t] = h0 * np.exp(
            -((x - hxmax_1) ** 2) / (sig * lon.size / 4.0) ** 2
            - (y - hymax_1) ** 2 / (sig * lat.size / 7.0) ** 2
        )
        P[:, :, t] += h0 * np.exp(
            -((x - hxmax_2) ** 2) / (sig * lon.size / 4.0) ** 2
            - (y - hymax_2) ** 2 / (sig * lat.size / 7.0) ** 2
        )

        # Geostrophic velocities from the pressure (height) field.
        V[:-1, :, t] = -np.diff(P[:, :, t], axis=0) / dx / corio_0 * g
        V[-1, :, t] = V[-2, :, t]  # Fill in the last column

        U[:, :-1, t] = np.diff(P[:, :, t], axis=1) / dy / corio_0 * g
        U[:, -1, t] = U[:, -2, t]  # Fill in the last row

    data = {"U": U, "V": V, "P": P}
    dimensions = {"lon": lon, "lat": lat, "time": time}

    fieldset = parcels.FieldSet.from_data(data, dimensions, transpose=True, mesh=mesh)

    # setting some constants for AdvectionRK45 kernel
    fieldset.RK45_min_dt = 1e-3
    fieldset.RK45_max_dt = 1e2
    fieldset.RK45_tol = 1e-5
    return fieldset
114
115
def moving_eddies_example(
    fieldset, outfile, npart=2, mode="jit", verbose=False, method=parcels.AdvectionRK4
):
    """Configuration of a particle set that follows two moving eddies.

    Parameters
    ----------
    fieldset :
        :class FieldSet: that defines the flow field
    outfile :
        Name (stem) of the output file the trajectories are written to.
    npart :
        Number of particles to initialise. (Default value = 2)
    mode :
        (Default value = 'jit')
    verbose :
        (Default value = False)
    method :
        (Default value = AdvectionRK4)
    """
    # Release particles on a meridional line; coordinates depend on the mesh type.
    start = (3.3, 46.0) if fieldset.U.grid.mesh == "spherical" else (3.3e5, 1e5)
    finish = (3.3, 47.8) if fieldset.U.grid.mesh == "spherical" else (3.3e5, 2.8e5)
    pset = parcels.ParticleSet.from_line(
        fieldset=fieldset, size=npart, pclass=ptype[mode], start=start, finish=finish
    )

    if verbose:
        print(f"Initial particle positions:\n{pset}")

    # Execute for 1 week, with 1 hour timesteps and hourly output
    runtime = timedelta(days=7)
    print(f"MovingEddies: Advecting {npart} particles for {runtime}")
    pset.execute(
        method,
        runtime=runtime,
        dt=timedelta(hours=1),
        output_file=pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1)),
    )

    if verbose:
        print(f"Final particle positions:\n{pset}")

    return pset
161
162
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_moving_eddies_fwdbwd(mode, mesh, tmpdir, npart=2):
    """Advect particles forward then backward; they must return to the start."""
    method = parcels.AdvectionRK4
    fieldset = moving_eddies_fieldset(mesh=mesh)

    # Release coordinates depend on the mesh type (degrees vs metres).
    lons = [3.3, 3.3] if fieldset.U.grid.mesh == "spherical" else [3.3e5, 3.3e5]
    lats = [46.0, 47.8] if fieldset.U.grid.mesh == "spherical" else [1e5, 2.8e5]
    pset = parcels.ParticleSet(
        fieldset=fieldset, pclass=ptype[mode], lon=lons, lat=lats
    )

    # Execute forward for 1 day, with 5-minute timesteps and hourly output
    runtime = timedelta(days=1)
    dt = timedelta(minutes=5)
    outputdt = timedelta(hours=1)
    print(f"MovingEddies: Advecting {npart} particles for {runtime}")
    outfile = tmpdir.join("EddyParticlefwd")
    pset.execute(
        method,
        runtime=runtime,
        dt=dt,
        output_file=pset.ParticleFile(name=outfile, outputdt=outputdt),
    )

    print("Now running in backward time mode")
    outfile = tmpdir.join("EddyParticlebwd")
    pset.execute(
        method,
        endtime=0,
        dt=-dt,
        output_file=pset.ParticleFile(name=outfile, outputdt=outputdt),
    )

    # Also include last timestep
    for var in ["lon", "lat", "depth", "time"]:
        pset.particledata.setallvardata(
            f"{var}", pset.particledata.getvardata(f"{var}_nextloop")
        )

    # After the round trip the particles must be back at their release points.
    assert np.allclose(pset.lon, lons)
    assert np.allclose(pset.lat, lats)
206
207
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_moving_eddies_fieldset(mode, mesh, tmpdir):
    """Check end positions of the two example particles for both mesh types."""
    fieldset = moving_eddies_fieldset(mesh=mesh)
    outfile = tmpdir.join("EddyParticle")
    pset = moving_eddies_example(fieldset, outfile, 2, mode=mode)
    # Also include last timestep
    for var in ["lon", "lat", "depth", "time"]:
        pset.particledata.setallvardata(
            f"{var}", pset.particledata.getvardata(f"{var}_nextloop")
        )
    # Expected ranges come from reference runs of this configuration.
    if mesh == "flat":
        assert pset[0].lon < 2.2e5 and 1.1e5 < pset[0].lat < 1.2e5
        assert pset[1].lon < 2.2e5 and 3.7e5 < pset[1].lat < 3.8e5
    else:
        assert pset[0].lon < 2.0 and 46.2 < pset[0].lat < 46.25
        assert pset[1].lon < 2.0 and 48.8 < pset[1].lat < 48.85
225
226
def fieldsetfile(mesh, tmpdir):
    """Write a 200x350 moving-eddies fieldset to disk and return its file stem."""
    path = tmpdir.join("moving_eddies")
    moving_eddies_fieldset(200, 350, mesh=mesh).write(path)
    return path
233
234
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_moving_eddies_file(mode, mesh, tmpdir):
    """Same as test_moving_eddies_fieldset, but reading the fieldset from file."""
    gc.collect()
    fieldset = parcels.FieldSet.from_parcels(
        fieldsetfile(mesh, tmpdir), extra_fields={"P": "P"}
    )
    outfile = tmpdir.join("EddyParticle")
    pset = moving_eddies_example(fieldset, outfile, 2, mode=mode)
    # Also include last timestep
    for var in ["lon", "lat", "depth", "time"]:
        pset.particledata.setallvardata(
            f"{var}", pset.particledata.getvardata(f"{var}_nextloop")
        )
    # Expected ranges come from reference runs of this configuration.
    if mesh == "flat":
        assert pset[0].lon < 2.2e5 and 1.1e5 < pset[0].lat < 1.2e5
        assert pset[1].lon < 2.2e5 and 3.7e5 < pset[1].lat < 3.8e5
    else:
        assert pset[0].lon < 2.0 and 46.2 < pset[0].lat < 46.25
        assert pset[1].lon < 2.0 and 48.8 < pset[1].lat < 48.85
255
256
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_periodic_and_computeTimeChunk_eddies(mode):
    """Run advection with periodic halos and extra kernels on downloaded data."""
    data_folder = parcels.download_example_dataset("MovingEddies_data")
    filename = str(data_folder / "moving_eddies")

    fieldset = parcels.FieldSet.from_parcels(filename)
    # Record the domain edges so the periodic kernel can wrap particles.
    fieldset.add_constant("halo_west", fieldset.U.grid.lon[0])
    fieldset.add_constant("halo_east", fieldset.U.grid.lon[-1])
    fieldset.add_constant("halo_south", fieldset.U.grid.lat[0])
    fieldset.add_constant("halo_north", fieldset.U.grid.lat[-1])
    fieldset.add_periodic_halo(zonal=True, meridional=True)
    pset = parcels.ParticleSet.from_list(
        fieldset=fieldset, pclass=ptype[mode], lon=[3.3, 3.3], lat=[46.0, 47.8]
    )

    def periodicBC(particle, fieldset, time):  # pragma: no cover
        # Wrap particle positions back into the domain in both directions.
        if particle.lon < fieldset.halo_west:
            particle_dlon += fieldset.halo_east - fieldset.halo_west  # noqa
        elif particle.lon > fieldset.halo_east:
            particle_dlon -= fieldset.halo_east - fieldset.halo_west
        if particle.lat < fieldset.halo_south:
            particle_dlat += fieldset.halo_north - fieldset.halo_south  # noqa
        elif particle.lat > fieldset.halo_north:
            particle_dlat -= fieldset.halo_north - fieldset.halo_south

    def slowlySouthWestward(particle, fieldset, time):  # pragma: no cover
        # Constant drift so particles eventually cross the domain edges.
        particle_dlon -= 5 * particle.dt / 1e5  # noqa
        particle_dlat -= 3 * particle.dt / 1e5  # noqa

    kernels = pset.Kernel(parcels.AdvectionRK4) + slowlySouthWestward + periodicBC
    pset.execute(kernels, runtime=timedelta(days=6), dt=timedelta(hours=1))
288
289
def main(args=None):
    """Parse command-line options and run the moving-eddies example.

    Parameters
    ----------
    args : list of str, optional
        Arguments to parse; argparse falls back to sys.argv when None.
    """
    # NOTE: the description previously said "around an idealised peninsula",
    # a copy-paste from the peninsula example; this script advects around
    # two moving eddies.
    p = ArgumentParser(
        description="""
Example of particle advection around two idealised moving eddies"""
    )
    p.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing RK4 computation",
    )
    p.add_argument(
        "-p", "--particles", type=int, default=2, help="Number of particles to advect"
    )
    p.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        help="Print particle information before and after execution",
    )
    p.add_argument(
        "--profiling",
        action="store_true",
        default=False,
        help="Print profiling information after run",
    )
    p.add_argument(
        "-f",
        "--fieldset",
        type=int,
        nargs=2,
        default=None,
        help="Generate fieldset file with given dimensions",
    )
    p.add_argument(
        "-m",
        "--method",
        choices=("RK4", "EE", "RK45"),
        default="RK4",
        help="Numerical method used for advection",
    )
    args = p.parse_args(args)
    # Download kept for its side effect of caching the example dataset.
    parcels.download_example_dataset("MovingEddies_data")

    # Generate fieldset according to given dimensions (or the defaults)
    if args.fieldset is not None:
        fieldset = moving_eddies_fieldset(
            args.fieldset[0], args.fieldset[1], mesh="flat"
        )
    else:
        fieldset = moving_eddies_fieldset(mesh="flat")
    outfile = "EddyParticle"

    if args.profiling:
        from cProfile import runctx
        from pstats import Stats

        runctx(
            "moving_eddies_example(fieldset, outfile, args.particles, mode=args.mode, \
                verbose=args.verbose, method=method[args.method])",
            globals(),
            locals(),
            "Profile.prof",
        )
        Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
    else:
        moving_eddies_example(
            fieldset,
            outfile,
            args.particles,
            mode=args.mode,
            verbose=args.verbose,
            method=method[args.method],
        )
367
368
# Allow running this example directly as a script.
if __name__ == "__main__":
    main()
example_nemo_curvilinear.py#
1"""Example script that runs a set of particles in a NEMO curvilinear grid."""
2
3from argparse import ArgumentParser
4from datetime import timedelta
5from glob import glob
6
7import numpy as np
8import pytest
9
10import parcels
11
# Map execution-mode name to the corresponding Parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map advection-scheme name to the corresponding Parcels kernel.
advection = {"RK4": parcels.AdvectionRK4, "AA": parcels.AdvectionAnalytical}
14
15
def run_nemo_curvilinear(mode, outfile, advtype="RK4"):
    """Run parcels on the NEMO curvilinear grid.

    Parameters
    ----------
    mode :
        Execution mode, 'scipy' or 'jit'.
    outfile :
        Name/path of the particle output file.
    advtype :
        Advection scheme: 'RK4' (default) or 'AA' (analytical advection).
    """
    data_folder = parcels.download_example_dataset("NemoCurvilinear_data")

    # Curvilinear coordinates come from the mesh-mask file; the velocity
    # data come from separate per-component files.
    filenames = {
        "U": {
            "lon": f"{data_folder}/mesh_mask.nc4",
            "lat": f"{data_folder}/mesh_mask.nc4",
            "data": f"{data_folder}/U_purely_zonal-ORCA025_grid_U.nc4",
        },
        "V": {
            "lon": f"{data_folder}/mesh_mask.nc4",
            "lat": f"{data_folder}/mesh_mask.nc4",
            "data": f"{data_folder}/V_purely_zonal-ORCA025_grid_V.nc4",
        },
    }
    variables = {"U": "U", "V": "V"}
    dimensions = {"lon": "glamf", "lat": "gphif"}
    chunksize = {"lat": ("y", 256), "lon": ("x", 512)}
    fieldset = parcels.FieldSet.from_nemo(
        filenames, variables, dimensions, chunksize=chunksize
    )
    assert fieldset.U.chunksize == chunksize

    # Now run particles as normal
    npart = 20
    lonp = 30 * np.ones(npart)
    if advtype == "RK4":
        latp = np.linspace(-70, 88, npart)
        runtime = timedelta(days=160)
    else:
        # Shorter run on a smaller latitude range for analytical advection.
        latp = np.linspace(-70, 70, npart)
        runtime = timedelta(days=15)

    def periodicBC(particle, fieldSet, time):  # pragma: no cover
        # Wrap longitudes across the antimeridian.
        if particle.lon > 180:
            particle_dlon -= 360  # noqa

    pset = parcels.ParticleSet.from_list(fieldset, ptype[mode], lon=lonp, lat=latp)
    pfile = parcels.ParticleFile(outfile, pset, outputdt=timedelta(days=1))
    kernels = pset.Kernel(advection[advtype]) + periodicBC
    pset.execute(kernels, runtime=runtime, dt=timedelta(hours=6), output_file=pfile)
    # The example flow is purely zonal, so latitudes should stay unchanged.
    assert np.allclose(pset.lat - latp, 0, atol=2e-2)
59
60
@pytest.mark.parametrize("mode", ["jit"])  # Only testing jit as scipy is very slow
def test_nemo_curvilinear(mode, tmpdir):
    """Test the NEMO curvilinear example with the default RK4 advection."""
    run_nemo_curvilinear(mode, tmpdir.join("nemo_particles"))
66
67
def test_nemo_curvilinear_AA(tmpdir):
    """Test the NEMO curvilinear example with analytical advection."""
    run_nemo_curvilinear("scipy", tmpdir.join("nemo_particlesAA"), advtype="AA")
72
73
def test_nemo_3D_samegrid():
    """Test that the same grid is used for U and V in 3D NEMO fields."""
    data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
    ufiles = sorted(glob(f"{data_folder}/ORCA*U.nc"))
    vfiles = sorted(glob(f"{data_folder}/ORCA*V.nc"))
    wfiles = sorted(glob(f"{data_folder}/ORCA*W.nc"))
    mesh_mask = f"{data_folder}/coordinates.nc"

    # All three variables share the mesh-mask coordinates and the W-file depth.
    datafiles = {"U": ufiles, "V": vfiles, "W": wfiles}
    filenames = {
        var: {"lon": mesh_mask, "lat": mesh_mask, "depth": wfiles[0], "data": files}
        for var, files in datafiles.items()
    }

    variables = {"U": "uo", "V": "vo", "W": "wo"}
    shared_dims = {
        "lon": "glamf",
        "lat": "gphif",
        "depth": "depthw",
        "time": "time_counter",
    }
    dimensions = {var: dict(shared_dims) for var in ("U", "V", "W")}

    fieldset = parcels.FieldSet.from_nemo(filenames, variables, dimensions)

    # U and W read different data files, so their file lists must be distinct.
    assert fieldset.U._dataFiles is not fieldset.W._dataFiles
113
114
def main(args=None):
    """Run the NEMO curvilinear example with given arguments.

    Parameters
    ----------
    args : list of str, optional
        Arguments to parse; argparse falls back to sys.argv when None.
    """
    # Fixed typo in the user-facing description ("Chose" -> "Choose").
    p = ArgumentParser(description="""Choose the mode using mode option""")
    p.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing computation",
    )
    args = p.parse_args(args)

    outfile = "nemo_particles"

    run_nemo_curvilinear(args.mode, outfile)
130
131
# Allow running this example directly as a script.
if __name__ == "__main__":
    main()
example_ofam.py#
1import gc
2from datetime import timedelta
3
4import numpy as np
5import pytest
6import xarray as xr
7
8import parcels
9
10ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
11
12
def set_ofam_fieldset(deferred_load=True, use_xarray=False):
    """Build a FieldSet from the OFAM example dataset.

    Parameters
    ----------
    deferred_load : bool
        Whether field data are loaded lazily (netCDF path only).
    use_xarray : bool
        Load via ``xr.open_mfdataset`` instead of ``FieldSet.from_netcdf``.
    """
    data_folder = parcels.download_example_dataset("OFAM_example_data")
    filenames = {
        "U": f"{data_folder}/OFAM_simple_U.nc",
        "V": f"{data_folder}/OFAM_simple_V.nc",
    }
    variables = {"U": "u", "V": "v"}
    dimensions = {
        "lat": "yu_ocean",
        "lon": "xu_ocean",
        "depth": "st_ocean",
        "time": "Time",
    }
    if use_xarray:
        ds = xr.open_mfdataset([filenames["U"], filenames["V"]], combine="by_coords")
        return parcels.FieldSet.from_xarray_dataset(
            ds, variables, dimensions, allow_time_extrapolation=True
        )
    else:
        return parcels.FieldSet.from_netcdf(
            filenames,
            variables,
            dimensions,
            allow_time_extrapolation=True,
            deferred_load=deferred_load,
            chunksize=False,
        )
40
41
42@pytest.mark.parametrize("use_xarray", [True, False])
43def test_ofam_fieldset_fillvalues(use_xarray):
44 fieldset = set_ofam_fieldset(deferred_load=False, use_xarray=use_xarray)
45 # V.data[0, 0, 150] is a landpoint, that makes NetCDF4 generate a masked array, instead of an ndarray
46 assert fieldset.V.data[0, 0, 150] == 0
47
48
@pytest.mark.parametrize("dt", [timedelta(minutes=-5), timedelta(minutes=5)])
def test_ofam_xarray_vs_netcdf(dt):
    """Advect one particle on NetCDF- and xarray-backed fieldsets; trajectories must agree."""
    lonstart, latstart, runtime = (180, 10, timedelta(days=7))

    endpoints = []
    # Run the identical experiment on both backends (NetCDF first, then xarray).
    for use_xarray in (False, True):
        fieldset = set_ofam_fieldset(use_xarray=use_xarray)
        pset = parcels.ParticleSet(
            fieldset, pclass=parcels.JITParticle, lon=lonstart, lat=latstart
        )
        pset.execute(parcels.AdvectionRK4, runtime=runtime, dt=dt)
        endpoints.append((pset[0].lon, pset[0].lat))

    (lon_netcdf, lat_netcdf), (lon_xarray, lat_xarray) = endpoints
    assert np.allclose(lon_netcdf, lon_xarray)
    assert np.allclose(lat_netcdf, lat_xarray)
67
68
@pytest.mark.parametrize("use_xarray", [True, False])
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_ofam_particles(mode, use_xarray):
    gc.collect()
    fieldset = set_ofam_fieldset(use_xarray=use_xarray)

    # One particle released at 2.5 m, the depth of the first layer in OFAM.
    pset = parcels.ParticleSet(
        fieldset, pclass=ptype[mode], lon=[180], lat=[10], depth=[2.5]
    )

    pset.execute(
        parcels.AdvectionRK4, runtime=timedelta(days=10), dt=timedelta(minutes=5)
    )

    # After 10 days the particle should end up near (173E, 11N).
    final_lon = pset[0].lon
    final_lat = pset[0].lat
    assert abs(final_lon - 173) < 1
    assert abs(final_lat - 11) < 1
example_peninsula.py#
1import gc
2import math # NOQA
3from argparse import ArgumentParser
4from datetime import timedelta
5
6import numpy as np
7import pytest
8
9import parcels
10
# Map execution-mode name ("scipy"/"jit") to the matching parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map CLI method name to the corresponding parcels advection kernel.
method = {
    "RK4": parcels.AdvectionRK4,
    "EE": parcels.AdvectionEE,
    "RK45": parcels.AdvectionRK45,
}
17
18
def peninsula_fieldset(xdim, ydim, mesh="flat", grid_type="A"):
    """Construct a fieldset encapsulating the flow field around an idealised peninsula.

    Parameters
    ----------
    xdim : int
        Horizontal dimension of the generated fieldset
    ydim : int
        Vertical dimension of the generated fieldset
    mesh : str
        String indicating the type of mesh coordinates and
        units used during velocity interpolation:

        1. spherical: Lat and lon in degree, with a
           correction for zonal velocity U near the poles.
        2. flat (default): No conversion, lat/lon are assumed to be in m.
    grid_type : str
        Option whether grid is either Arakawa A (default) or C

    The original test description can be found in Fig. 2.2.3 in:
    North, E. W., Gallego, A., Petitgas, P. (Eds). 2009. Manual of
    recommended practices for modelling physical - biological
    interactions during fish early life.
    ICES Cooperative Research Report No. 295. 111 pp.
    http://archimer.ifremer.fr/doc/00157/26792/24888.pdf
    """
    # Set Parcels FieldSet variables

    # Generate the original test setup on A-grid in m
    domainsizeX, domainsizeY = (1.0e5, 5.0e4)
    La = np.linspace(1e3, domainsizeX, xdim, dtype=np.float32)
    Wa = np.linspace(1e3, domainsizeY, ydim, dtype=np.float32)

    u0 = 1  # background flow speed
    x0 = domainsizeX / 2  # peninsula tip at mid-domain
    R = 0.32 * domainsizeX / 2  # radius of the idealised peninsula

    # Create the fields: P is the analytical stream function
    x, y = np.meshgrid(La, Wa, sparse=True, indexing="xy")
    P = u0 * R**2 * y / ((x - x0) ** 2 + y**2) - u0 * y

    # Set land points to zero
    landpoints = P >= 0.0
    P[landpoints] = 0.0

    if grid_type == "A":
        # Analytical velocities, co-located with P on the A-grid
        U = u0 - u0 * R**2 * ((x - x0) ** 2 - y**2) / (((x - x0) ** 2 + y**2) ** 2)
        V = -2 * u0 * R**2 * ((x - x0) * y) / (((x - x0) ** 2 + y**2) ** 2)
        U[landpoints] = 0.0
        V[landpoints] = 0.0
    elif grid_type == "C":
        # Staggered C-grid velocities derived from stream-function differences
        U = np.zeros(P.shape)
        V = np.zeros(P.shape)
        V[:, 1:] = (P[:, 1:] - P[:, :-1]) / (La[1] - La[0])
        U[1:, :] = -(P[1:, :] - P[:-1, :]) / (Wa[1] - Wa[0])
    else:
        raise RuntimeError(f"Grid_type {grid_type} is not a valid option")

    # Convert from m to lat/lon for spherical meshes
    lon = La / 1852.0 / 60.0 if mesh == "spherical" else La
    lat = Wa / 1852.0 / 60.0 if mesh == "spherical" else Wa

    data = {"U": U, "V": V, "P": P}
    dimensions = {"lon": lon, "lat": lat}

    fieldset = parcels.FieldSet.from_data(data, dimensions, mesh=mesh)
    if grid_type == "C":
        fieldset.U.interp_method = "cgrid_velocity"
        fieldset.V.interp_method = "cgrid_velocity"
    return fieldset
92
93
# Kernel: sample the stream function P at the particle location each step; on
# the first timestep also record it as p_start for later accuracy checks.
# (Comments only — parcels translates kernel bodies, so the code is untouched.)
def UpdateP(particle, fieldset, time):  # pragma: no cover
    if time == 0:
        particle.p_start = fieldset.P[time, particle.depth, particle.lat, particle.lon]
    particle.p = fieldset.P[time, particle.depth, particle.lat, particle.lon]
98
99
def peninsula_example(
    fieldset,
    outfile,
    npart,
    mode="jit",
    degree=1,
    verbose=False,
    output=True,
    method=parcels.AdvectionRK4,
):
    """Example configuration of particle flow around an idealised Peninsula

    Parameters
    ----------
    fieldset : parcels.FieldSet
        Fieldset (with U, V and P fields) in which to advect the particles.
    outfile : str
        Basename of the particle output file.
    npart : int
        Number of particles to initialise.
    mode :
        (Default value = 'jit')
    degree :
        (Default value = 1)
    verbose :
        (Default value = False)
    output :
        (Default value = True)
    method :
        (Default value = AdvectionRK4)

    """
    # First, we define a custom Particle class to which we add a
    # custom variable, the initial stream function value p.
    # We determine the particle base class according to mode.
    MyParticle = ptype[mode].add_variable(
        [
            parcels.Variable("p", dtype=np.float32, initial=0.0),
            parcels.Variable("p_start", dtype=np.float32, initial=0),
        ]
    )

    # Initialise particles on a meridional line, offset 3 km from the
    # western boundary (offset expressed in degrees for spherical meshes).
    if fieldset.U.grid.mesh == "flat":
        x = 3000  # 3 km offset from boundary
    else:
        x = 3.0 * (1.0 / 1.852 / 60)  # 3 km offset from boundary
    y = (
        fieldset.U.lat[0] + x,
        fieldset.U.lat[-1] - x,
    )  # latitude range, including offsets
    pset = parcels.ParticleSet.from_line(
        fieldset,
        size=npart,
        pclass=MyParticle,
        start=(x, y[0]),
        finish=(x, y[1]),
        time=0,
    )

    if verbose:
        print(f"Initial particle positions:\n{pset}")

    # Advect the particles for 24h
    time = timedelta(hours=24)
    dt = timedelta(minutes=5)
    k_adv = pset.Kernel(method)  # advection kernel (RK4 by default)
    k_p = pset.Kernel(UpdateP)  # sampling kernel recording p / p_start
    out = (
        pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1)) if output else None
    )
    print(f"Peninsula: Advecting {npart} particles for {time}")
    pset.execute(k_adv + k_p, runtime=time, dt=dt, output_file=out)

    if verbose:
        print(f"Final particle positions:\n{pset}")

    return pset
178
179
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_peninsula_fieldset(mode, mesh, tmpdir):
    """Execute peninsula test from fieldset generated in memory."""
    fieldset = peninsula_fieldset(100, 50, mesh)
    pset = peninsula_example(fieldset, tmpdir.join("Peninsula"), 5, mode=mode, degree=1)

    # Advection accuracy: particles should stay on their initial streamline.
    assert (np.abs(pset.p_start - pset.p) <= 1.0).all()

    # Field sampling accuracy: the kernel-sampled p must match direct sampling.
    sampling_errors = [
        abs(pset.p[i] - pset.fieldset.P[0.0, pset.depth[i], pset.lat[i], pset.lon[i]])
        for i in range(pset.size)
    ]
    assert (np.array(sampling_errors) <= 1.0).all()
201
202
@pytest.mark.parametrize(
    "mode", ["scipy"]
)  # Analytical Advection only implemented in Scipy mode
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_peninsula_fieldset_AnalyticalAdvection(mode, mesh, tmpdir):
    """Execute peninsula test using Analytical Advection on C grid."""
    # NOTE(review): the `mesh` parameter is not forwarded — the fieldset is
    # always built with mesh="flat", so both parametrizations run the same
    # configuration. Confirm whether "spherical" was meant to be exercised.
    fieldset = peninsula_fieldset(101, 51, "flat", grid_type="C")
    outfile = tmpdir.join("PeninsulaAA")
    pset = peninsula_example(
        fieldset, outfile, npart=10, mode=mode, method=parcels.AdvectionAnalytical
    )
    # Test advection accuracy by comparing streamline values
    err_adv = np.array([abs(p.p_start - p.p) for p in pset])

    # Per-mode tolerance; .get() would yield None for other modes, but only
    # "scipy" is parametrized above.
    tol = {"scipy": 3.0e2, "jit": 1.0e2}.get(mode)
    assert (err_adv <= tol).all()
219
220
def fieldsetfile(mesh, tmpdir):
    """Generate fieldset files for peninsula test and return their basename."""
    basename = tmpdir.join("peninsula")
    peninsula_fieldset(100, 50, mesh=mesh).write(basename)
    return basename
227
228
@pytest.mark.parametrize("mode", ["scipy", "jit"])
@pytest.mark.parametrize("mesh", ["flat", "spherical"])
def test_peninsula_file(mode, mesh, tmpdir):
    """Open fieldset files and execute."""
    gc.collect()
    fieldset = parcels.FieldSet.from_parcels(
        fieldsetfile(mesh, tmpdir),
        extra_fields={"P": "P"},
        allow_time_extrapolation=True,
    )
    pset = peninsula_example(fieldset, tmpdir.join("Peninsula"), 5, mode=mode, degree=1)

    # Advection accuracy: particles should stay on their initial streamline.
    assert (np.abs(pset.p_start - pset.p) <= 1.0).all()

    # Field sampling accuracy: the kernel-sampled p must match direct sampling.
    sampling_errors = [
        abs(pset.p[i] - pset.fieldset.P[0.0, pset.depth[i], pset.lat[i], pset.lon[i]])
        for i in range(pset.size)
    ]
    assert (np.array(sampling_errors) <= 1.0).all()
255
256
def main(args=None):
    """Parse command-line options and run the peninsula example.

    Builds (or reads user-sized) fieldset files, re-opens them via
    FieldSet.from_parcels, then advects particles — optionally under cProfile.
    """
    p = ArgumentParser(
        description="""
Example of particle advection around an idealised peninsula"""
    )
    p.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing RK4 computation",
    )
    p.add_argument(
        "-p", "--particles", type=int, default=20, help="Number of particles to advect"
    )
    p.add_argument(
        "-d", "--degree", type=int, default=1, help="Degree of spatial interpolation"
    )
    p.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        help="Print particle information before and after execution",
    )
    p.add_argument(
        "-o",
        "--nooutput",
        action="store_true",
        default=False,
        help="Suppress trajectory output",
    )
    p.add_argument(
        "--profiling",
        action="store_true",
        default=False,
        help="Print profiling information after run",
    )
    p.add_argument(
        "-f",
        "--fieldset",
        type=int,
        nargs=2,
        default=None,
        help="Generate fieldset file with given dimensions",
    )
    p.add_argument(
        "-m",
        "--method",
        choices=("RK4", "EE", "RK45"),
        default="RK4",
        help="Numerical method used for advection",
    )
    args = p.parse_args(args)

    filename = "peninsula"
    # Generate the fieldset (with user-supplied dimensions if given) and write
    # it to disk so it can be re-read below through the file-based loader.
    if args.fieldset is not None:
        fieldset = peninsula_fieldset(args.fieldset[0], args.fieldset[1], mesh="flat")
    else:
        fieldset = peninsula_fieldset(100, 50, mesh="flat")
    fieldset.write(filename)

    # Open fieldset file set
    fieldset = parcels.FieldSet.from_parcels(
        "peninsula", extra_fields={"P": "P"}, allow_time_extrapolation=True
    )
    outfile = "Peninsula"

    if args.profiling:
        from cProfile import runctx
        from pstats import Stats

        # NOTE: the exec'd string below references the locals `fieldset`,
        # `outfile` and `args`; keep it in sync with this function's names.
        runctx(
            "peninsula_example(fieldset, outfile, args.particles, mode=args.mode,\
               degree=args.degree, verbose=args.verbose,\
               output=not args.nooutput, method=method[args.method])",
            globals(),
            locals(),
            "Profile.prof",
        )
        Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
    else:
        peninsula_example(
            fieldset,
            outfile,
            args.particles,
            mode=args.mode,
            degree=args.degree,
            verbose=args.verbose,
            output=not args.nooutput,
            method=method[args.method],
        )
349
350
351if __name__ == "__main__":
352 main()
example_radial_rotation.py#
1import math
2from datetime import timedelta
3
4import numpy as np
5import pytest
6
7import parcels
8
# Map execution-mode name ("scipy"/"jit") to the matching parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
10
11
def radial_rotation_fieldset(
    xdim=200, ydim=200
):  # Define 2D flat, square fieldset for testing purposes.
    """Build a flat 60x60 fieldset with solid-body rotation about its centre.

    The rotational period is one day. Replaces the original O(xdim*ydim)
    per-point Python double loop with vectorized numpy broadcasting; the
    per-point formulas (r, theta, U, V) are unchanged.
    """
    lon = np.linspace(0, 60, xdim, dtype=np.float32)
    lat = np.linspace(0, 60, ydim, dtype=np.float32)

    x0 = 30.0  # Define the origin to be the centre of the Field.
    y0 = 30.0

    T = timedelta(days=1)
    omega = 2 * np.pi / T.total_seconds()  # Define the rotational period as 1 day.

    # 2D coordinate grids, shape (ydim, xdim): xx varies along columns (lon),
    # yy along rows (lat), matching the original U[j, i] indexing.
    xx, yy = np.meshgrid(lon, lat)

    r = np.sqrt((xx - x0) ** 2 + (yy - y0) ** 2)  # Radial displacement.
    assert (r >= 0.0).all()
    assert (r <= np.sqrt(x0**2 + y0**2)).all()

    theta = np.arctan2(yy - y0, xx - x0)  # Polar angle.
    assert (np.abs(theta) <= np.pi).all()

    U = (r * np.sin(theta) * omega).astype(np.float32)
    V = (-r * np.cos(theta) * omega).astype(np.float32)

    data = {"U": U, "V": V}
    dimensions = {"lon": lon, "lat": lat}
    return parcels.FieldSet.from_data(data, dimensions, mesh="flat")
44
45
def true_values(age):
    """Analytical position [x, y] of the peripheral particle after ``age`` seconds."""
    seconds_per_day = 24.0 * 60.0**2
    phase = 2 * np.pi * age / seconds_per_day
    # Circle of radius 20 around the centre (30, 30), starting at (30, 50).
    return [20 * math.sin(phase) + 30.0, 20 * math.cos(phase) + 30.0]
51
52
def rotation_example(fieldset, outfile, mode="jit", method=parcels.AdvectionRK4):
    """Advect two particles on the rotating fieldset for 17 hours.

    One particle sits at the centre of rotation, the other on the periphery.
    """
    pset = parcels.ParticleSet.from_line(
        fieldset,
        size=2,  # Test two particles on the rotating fieldset.
        pclass=ptype[mode],
        start=(30.0, 30.0),
        finish=(30.0, 50.0),
    )  # One particle in centre, one on periphery of Field.

    particle_file = pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1))
    pset.execute(
        method,
        runtime=timedelta(hours=17),
        dt=timedelta(minutes=5),
        output_file=particle_file,
    )

    return pset
75
76
@pytest.mark.parametrize("mode", ["scipy", "jit"])
def test_rotation_example(mode, tmpdir):
    fieldset = radial_rotation_fieldset()
    pset = rotation_example(fieldset, tmpdir.join("RadialParticle"), mode=mode)

    # Particle at centre of Field remains stationary.
    assert pset[0].lon == 30.0
    assert pset[0].lat == 30.0

    # Check advected values against calculated values.
    expected_x, expected_y = true_values(pset.time[1])
    assert np.allclose(pset[1].lon, expected_x, 1e-5)
    assert np.allclose(pset[1].lat, expected_y, 1e-5)
90
91
def main():
    """Write the rotation fieldset to disk, then run the example."""
    fieldset = radial_rotation_fieldset()
    fieldset.write("radial_rotation")
    rotation_example(fieldset, "RadialParticle")
99
100
101if __name__ == "__main__":
102 main()
example_stommel.py#
1import math
2from argparse import ArgumentParser
3from datetime import timedelta
4
5import numpy as np
6import pytest
7
8import parcels
9
# Map execution-mode name ("scipy"/"jit") to the matching parcels particle class.
ptype = {"scipy": parcels.ScipyParticle, "jit": parcels.JITParticle}
# Map CLI method name to the corresponding parcels advection kernel.
method = {
    "RK4": parcels.AdvectionRK4,
    "EE": parcels.AdvectionEE,
    "RK45": parcels.AdvectionRK45,
}
16
17
def stommel_fieldset(xdim=200, ydim=200, grid_type="A"):
    """Simulate a periodic current along a western boundary, with significantly
    larger velocities along the western edge than the rest of the region

    The original test description can be found in: N. Fabbroni, 2009,
    Numerical Simulation of Passive tracers dispersion in the sea,
    Ph.D. dissertation, University of Bologna
    http://amsdottorato.unibo.it/1733/1/Fabbroni_Nicoletta_Tesi.pdf

    Parameters
    ----------
    xdim : int
        Number of zonal grid points (default 200).
    ydim : int
        Number of meridional grid points (default 200).
    grid_type : str
        "A" (co-located velocities, default) or "C" (staggered, derived from P).
    """
    a = b = 10000 * 1e3  # domain size in m
    scalefac = 0.05  # to scale for physically meaningful velocities
    dx, dy = a / xdim, b / ydim

    # Coordinates of the test fieldset (on A-grid, flat mesh in m)
    lon = np.linspace(0, a, xdim, dtype=np.float32)
    lat = np.linspace(0, b, ydim, dtype=np.float32)

    # Define arrays U (zonal), V (meridional) and P (sea surface height)
    U = np.zeros((lat.size, lon.size), dtype=np.float32)
    V = np.zeros((lat.size, lon.size), dtype=np.float32)

    # Constants of the analytical Stommel solution
    beta = 2e-11
    r = 1 / (11.6 * 86400)
    es = r / (beta * a)

    # Vectorized evaluation of the analytical solution; replaces the original
    # O(xdim*ydim) Python double loop with numpy broadcasting. The per-point
    # formulas are unchanged.
    xi = lon.astype(np.float64)[np.newaxis, :] / a  # shape (1, xdim)
    yi = lat.astype(np.float64)[:, np.newaxis] / b  # shape (ydim, 1)
    P = (
        (1 - np.exp(-xi / es) - xi) * math.pi * np.sin(math.pi * yi) * scalefac
    ).astype(np.float32)
    if grid_type == "A":
        U = (
            -(1 - np.exp(-xi / es) - xi)
            * math.pi**2
            * np.cos(math.pi * yi)
            * scalefac
        ).astype(np.float32)
        V = (
            (np.exp(-xi / es) / es - 1) * math.pi * np.sin(math.pi * yi) * scalefac
        ).astype(np.float32)
    if grid_type == "C":
        # Staggered velocities from differences of the sea-surface height P
        V[:, 1:] = (P[:, 1:] - P[:, 0:-1]) / dx * a
        U[1:, :] = -(P[1:, :] - P[0:-1, :]) / dy * b

    data = {"U": U, "V": V, "P": P}
    dimensions = {"lon": lon, "lat": lat}
    fieldset = parcels.FieldSet.from_data(data, dimensions, mesh="flat")
    if grid_type == "C":
        fieldset.U.interp_method = "cgrid_velocity"
        fieldset.V.interp_method = "cgrid_velocity"
    return fieldset
78
79
# Kernel: sample the stream function P at the particle location each step; on
# the first timestep also record it as p_start for later accuracy checks.
# (Comments only — parcels translates kernel bodies, so the code is untouched.)
def UpdateP(particle, fieldset, time):  # pragma: no cover
    if time == 0:
        particle.p_start = fieldset.P[time, particle.depth, particle.lat, particle.lon]
    particle.p = fieldset.P[time, particle.depth, particle.lat, particle.lon]
84
85
# Kernel: accumulate particle age by dt each step and delete particles older
# than fieldset.maxage (a fieldset constant registered in stommel_example).
def AgeP(particle, fieldset, time):  # pragma: no cover
    particle.age += particle.dt
    if particle.age > fieldset.maxage:
        particle.delete()
90
91
def simple_partition_function(coords, mpi_size=1):
    """A very simple partition function that assigns particles to processors (for MPI testing purposes))"""
    n_particles = coords.shape[0]
    # Evenly spread particle indices over the ranks [0, mpi_size).
    ranks = np.linspace(0, mpi_size, n_particles, endpoint=False, dtype=np.int32)
    return ranks
95
96
def stommel_example(
    npart=1,
    mode="jit",
    verbose=False,
    method=parcels.AdvectionRK4,
    grid_type="A",
    outfile="StommelParticle.zarr",
    repeatdt=None,
    maxage=None,
    write_fields=True,
    custom_partition_function=False,
):
    """Advect particles in the Stommel gyre for 600 days and return the ParticleSet.

    Parameters
    ----------
    npart : int
        Number of particles released on a zonal line from (10, 5000) km to
        (100, 5000) km (default 1).
    mode : str
        "jit" or "scipy" execution mode (default "jit").
    verbose : bool
        Print particle positions before and after execution (default False).
    method :
        Advection kernel (default parcels.AdvectionRK4).
    grid_type : str
        "A" or "C" grid staggering for the fieldset (default "A").
    outfile : str
        Name of the particle output file (default "StommelParticle.zarr").
    repeatdt :
        Repeat interval for particle release (default None: release once).
    maxage :
        Maximum particle age in seconds before deletion; defaults to the full
        runtime when None.
    write_fields : bool
        Also write the hydrodynamic fields to disk (default True).
    custom_partition_function : bool
        Use simple_partition_function (for MPI testing purposes).
    """
    parcels.timer.fieldset = parcels.timer.Timer(
        "FieldSet", parent=parcels.timer.stommel
    )
    fieldset = stommel_fieldset(grid_type=grid_type)
    if write_fields:
        filename = "stommel"
        fieldset.write(filename)
    parcels.timer.fieldset.stop()

    # Determine particle class according to mode
    parcels.timer.pset = parcels.timer.Timer("Pset", parent=parcels.timer.stommel)
    parcels.timer.psetinit = parcels.timer.Timer("Pset_init", parent=parcels.timer.pset)
    ParticleClass = parcels.JITParticle if mode == "jit" else parcels.ScipyParticle

    # Execute for 600 days, with 1-hour timesteps and 5-day output
    runtime = timedelta(days=600)
    dt = timedelta(hours=1)
    outputdt = timedelta(days=5)

    extra_vars = [
        parcels.Variable("p", dtype=np.float32, initial=0.0),
        parcels.Variable("p_start", dtype=np.float32, initial=0.0),
        # next_dt: per-particle timestep, presumably consumed by the RK45
        # adaptive-step kernel — confirm against the parcels kernel source.
        parcels.Variable("next_dt", dtype=np.float64, initial=dt.total_seconds()),
        parcels.Variable("age", dtype=np.float32, initial=0.0),
    ]
    MyParticle = ParticleClass.add_variables(extra_vars)

    # The two branches differ only in passing partition_function.
    if custom_partition_function:
        pset = parcels.ParticleSet.from_line(
            fieldset,
            size=npart,
            pclass=MyParticle,
            repeatdt=repeatdt,
            start=(10e3, 5000e3),
            finish=(100e3, 5000e3),
            time=0,
            partition_function=simple_partition_function,
        )
    else:
        pset = parcels.ParticleSet.from_line(
            fieldset,
            size=npart,
            pclass=MyParticle,
            repeatdt=repeatdt,
            start=(10e3, 5000e3),
            finish=(100e3, 5000e3),
            time=0,
        )

    if verbose:
        print(f"Initial particle positions:\n{pset}")

    # Particles older than maxage are deleted by the AgeP kernel.
    maxage = runtime.total_seconds() if maxage is None else maxage
    fieldset.add_constant("maxage", maxage)
    print(f"Stommel: Advecting {npart} particles for {runtime}")
    parcels.timer.psetinit.stop()
    parcels.timer.psetrun = parcels.timer.Timer("Pset_run", parent=parcels.timer.pset)
    pset.execute(
        method + pset.Kernel(UpdateP) + pset.Kernel(AgeP),
        runtime=runtime,
        dt=dt,
        output_file=pset.ParticleFile(name=outfile, outputdt=outputdt),
    )

    if verbose:
        print(f"Final particle positions:\n{pset}")
    parcels.timer.psetrun.stop()
    parcels.timer.pset.stop()

    return pset
179
180
181@pytest.mark.parametrize("grid_type", ["A", "C"])
182@pytest.mark.parametrize("mode", ["jit", "scipy"])
183def test_stommel_fieldset(mode, grid_type, tmpdir):
184 parcels.timer.root = parcels.timer.Timer("Main")
185 parcels.timer.stommel = parcels.timer.Timer("Stommel", parent=parcels.timer.root)
186 outfile = tmpdir.join("StommelParticle")
187 psetRK4 = stommel_example(
188 1,
189 mode=mode,
190 method=method["RK4"],
191 grid_type=grid_type,
192 outfile=outfile,
193 write_fields=False,
194 )
195 psetRK45 = stommel_example(
196 1,
197 mode=mode,
198 method=method["RK45"],
199 grid_type=grid_type,
200 outfile=outfile,
201 write_fields=False,
202 )
203 assert np.allclose(psetRK4.lon, psetRK45.lon, rtol=1e-3)
204 assert np.allclose(psetRK4.lat, psetRK45.lat, rtol=1.1e-3)
205 err_adv = np.abs(psetRK4.p_start - psetRK4.p)
206 assert (err_adv <= 1.0e-1).all()
207 err_smpl = np.array(
208 [
209 abs(
210 psetRK4.p[i]
211 - psetRK4.fieldset.P[
212 0.0, psetRK4.lon[i], psetRK4.lat[i], psetRK4.depth[i]
213 ]
214 )
215 for i in range(psetRK4.size)
216 ]
217 )
218 assert (err_smpl <= 1.0e-1).all()
219 parcels.timer.stommel.stop()
220 parcels.timer.root.stop()
221 parcels.timer.root.print_tree()
222
223
def main(args=None):
    """Parse command-line options and run the Stommel example.

    Parameters
    ----------
    args : list of str, optional
        Command-line arguments to parse; ``None`` means ``sys.argv[1:]``.
    """
    parcels.timer.root = parcels.timer.Timer("Main")
    parcels.timer.args = parcels.timer.Timer("Args", parent=parcels.timer.root)

    def _str_to_bool(value):
        # BUGFIX: the boolean-ish options (-wf, -cpf) previously received the
        # raw string, so "-wf False" was truthy and fields could never be
        # disabled from the CLI. Parse common textual truthy values instead.
        if isinstance(value, bool):
            return value
        return value.strip().lower() in ("1", "true", "t", "yes", "y")

    p = ArgumentParser(
        description="""
Example of particle advection in the steady-state solution of the Stommel equation"""
    )
    p.add_argument(
        "mode",
        choices=("scipy", "jit"),
        nargs="?",
        default="jit",
        help="Execution mode for performing computation",
    )
    p.add_argument(
        "-p", "--particles", type=int, default=1, help="Number of particles to advect"
    )
    p.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        help="Print particle information before and after execution",
    )
    p.add_argument(
        "-m",
        "--method",
        choices=("RK4", "EE", "RK45"),
        default="RK4",
        help="Numerical method used for advection",
    )
    p.add_argument(
        "-o", "--outfile", default="StommelParticle.zarr", help="Name of output file"
    )
    p.add_argument(
        "-r", "--repeatdt", default=None, type=int, help="repeatdt of the ParticleSet"
    )
    p.add_argument(
        "-a",
        "--maxage",
        default=None,
        type=int,
        help="max age of the particles (after which particles are deleted)",
    )
    p.add_argument(
        "-wf",
        "--write_fields",
        default=True,
        type=_str_to_bool,
        help="Write the hydrodynamic fields to NetCDF",
    )
    p.add_argument(
        "-cpf",
        "--custom_partition_function",
        default=False,
        type=_str_to_bool,
        help="Use a custom partition_function (for MPI testing purposes)",
    )
    args = p.parse_args(args)

    parcels.timer.args.stop()
    parcels.timer.stommel = parcels.timer.Timer("Stommel", parent=parcels.timer.root)
    stommel_example(
        args.particles,
        mode=args.mode,
        verbose=args.verbose,
        method=method[args.method],
        outfile=args.outfile,
        repeatdt=args.repeatdt,
        maxage=args.maxage,
        write_fields=args.write_fields,
        custom_partition_function=args.custom_partition_function,
    )
    parcels.timer.stommel.stop()
    parcels.timer.root.stop()
    parcels.timer.root.print_tree()