diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 92e02092a0..4657501702 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -196,7 +196,7 @@ steps: - "$SVERDRUP_HOME/julia-$JULIA_VERSION/bin/julia --color=yes --project -e 'using Pkg; Pkg.test()'" agents: queue: Oceananigans - architecture: GPU' + architecture: GPU depends_on: "init_gpu" - label: "🦍 cpu scripts" diff --git a/docs/src/model_setup/background_fields.md b/docs/src/model_setup/background_fields.md index 1fefe326e6..c9aaf23ed0 100644 --- a/docs/src/model_setup/background_fields.md +++ b/docs/src/model_setup/background_fields.md @@ -53,7 +53,7 @@ model.background_fields.velocities.u FunctionField located at (Face, Cell, Cell) ├── func: U ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=1, Ny=1, Nz=1) -├── clock: Clock(time=0.000 s, iteration=0) +├── clock: Clock(time=0 seconds, iteration=0) └── parameters: nothing ``` @@ -96,6 +96,6 @@ model.background_fields.tracers.b FunctionField located at (Cell, Cell, Cell) ├── func: B ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=1, Ny=1, Nz=1) -├── clock: Clock(time=0.000 s, iteration=0) +├── clock: Clock(time=0 seconds, iteration=0) └── parameters: (α = 3.14, N = 1.0, f = 0.1) ``` diff --git a/docs/src/model_setup/boundary_conditions.md b/docs/src/model_setup/boundary_conditions.md index 4925d64074..9131a3ff32 100644 --- a/docs/src/model_setup/boundary_conditions.md +++ b/docs/src/model_setup/boundary_conditions.md @@ -241,7 +241,7 @@ julia> T_bcs = TracerBoundaryConditions(grid, top = ValueBoundaryCondition(20), bottom = GradientBoundaryCondition(0.01)); julia> model = IncompressibleModel(grid=grid, boundary_conditions=(u=u_bcs, T=T_bcs)) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=16, Ny=16, Nz=16) ├── tracers: (:T, :S) ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(:T, :S),Tuple{Float64,Float64}}} diff --git a/docs/src/model_setup/buoyancy_and_equation_of_state.md b/docs/src/model_setup/buoyancy_and_equation_of_state.md index 56b727edde..57dc3c9978 100644 --- a/docs/src/model_setup/buoyancy_and_equation_of_state.md +++ b/docs/src/model_setup/buoyancy_and_equation_of_state.md @@ -21,7 +21,7 @@ end julia> grid = RegularCartesianGrid(size=(64, 64, 64), extent=(1, 1, 1)); julia> model = IncompressibleModel(grid=grid, buoyancy=nothing) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64) ├── tracers: (:T, :S) ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(:T, :S),Tuple{Float64,Float64}}} @@ -35,7 +35,7 @@ also pass`tracers = ()` to the model constructor. ```jldoctest buoyancy julia> model = IncompressibleModel(grid=grid, buoyancy=nothing, tracers=()) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64) ├── tracers: () ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(),Tuple{}}} @@ -50,7 +50,7 @@ constructor. 
Buoyancy `:b` must be included as a tracer, for example, ```jldoctest buoyancy julia> model = IncompressibleModel(grid=grid, buoyancy=BuoyancyTracer(), tracers=(:b)) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64) ├── tracers: (:b,) ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(:b,),Tuple{Float64}}} @@ -65,7 +65,7 @@ To evolve temperature $T$ and salinity $S$ and diagnose the buoyancy, you can pa ```jldoctest buoyancy julia> model = IncompressibleModel(grid=grid, buoyancy=SeawaterBuoyancy()) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64) ├── tracers: (:T, :S) ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(:T, :S),Tuple{Float64,Float64}}} @@ -86,7 +86,7 @@ SeawaterBuoyancy{Float64}: g = 1.3 └── equation of state: LinearEquationOfState{Float64}: α = 1.67e-04, β = 7.80e-04 julia> model = IncompressibleModel(grid=grid, buoyancy=buoyancy) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64) ├── tracers: (:T, :S) ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(:T, :S),Tuple{Float64,Float64}}} diff --git a/docs/src/model_setup/clock.md b/docs/src/model_setup/clock.md index 80f2f9292b..18c8b557b1 100644 --- a/docs/src/model_setup/clock.md +++ b/docs/src/model_setup/clock.md @@ -13,7 +13,7 @@ end ```jldoctest julia> clock = Clock(time=0.0) -Clock{Float64}: time = 0.000 s, iteration = 0, stage = 1 +Clock{Float64}: time = 0 seconds, iteration = 0, stage = 1 ``` but can be modified to start the model clock at some other time. @@ -21,7 +21,7 @@ For example, passing ```jldoctest julia> clock = Clock(time=3600.0) -Clock{Float64}: time = 1.000 hr, iteration = 0, stage = 1 +Clock{Float64}: time = 1 hour, iteration = 0, stage = 1 ``` to the constructor for `IncompressibleModel` causes the simulation diff --git a/docs/src/model_setup/diagnostics.md b/docs/src/model_setup/diagnostics.md index c4fbecec48..a2c59806c8 100644 --- a/docs/src/model_setup/diagnostics.md +++ b/docs/src/model_setup/diagnostics.md @@ -11,89 +11,3 @@ time or be specified at any later time and appended (or assigned with a key valu Most diagnostics can be run at specified frequencies (e.g. every 25 time steps) or specified intervals (e.g. every 15 minutes of simulation time). If you'd like to run a diagnostic on demand then do not specify any intervals (and do not add it to `simulation.diagnostics`). - -We describe the [`Average`](@ref) diagnostic in detail below but see the API documentation for other diagnostics such -as [`TimeSeries`](@ref), [`FieldMaximum`](@ref), [`CFL`](@ref), and [`NaNChecker`](@ref). - -## Horizontal averages - -You can create a horizontal `Average` diagnostic by passing a field to the constructor, e.g. 
- -```@meta -DocTestSetup = quote - using Oceananigans - using Oceananigans.Diagnostics -end -``` - -```jldoctest -julia> model = IncompressibleModel(grid=RegularCartesianGrid(size=(4, 4, 4), extent=(1, 1, 1))); - -julia> T_avg = Average(model.tracers.T, dims=(1, 2)); - -julia> T_avg(model) # Compute horizontal average of T on demand -1×1×6 Array{Float64,3}: -[:, :, 1] = - 0.0 - -[:, :, 2] = - 0.0 - -[:, :, 3] = - 0.0 - -[:, :, 4] = - 0.0 - -[:, :, 5] = - 0.0 - -[:, :, 6] = - 0.0 -``` - -which can then be called on-demand via `T_avg(model)` to return the horizontally averaged temperature. Notice that -halo regions are included in the output of the horizontal average. When running on the GPU you may want it to return -an `Array` instead of a `CuArray` in case you want to save the horizontal average to disk in which case you'd want to -construct it like - -```jldoctest -julia> model = IncompressibleModel(grid=RegularCartesianGrid(size=(4, 4, 4), extent=(1, 1, 1))); - -julia> T_avg = Average(model.tracers.T, dims=(1, 2), return_type=Array); - -julia> T_avg(model) # Will always return an Array -1×1×6 Array{Float64,3}: -[:, :, 1] = - 0.0 - -[:, :, 2] = - 0.0 - -[:, :, 3] = - 0.0 - -[:, :, 4] = - 0.0 - -[:, :, 5] = - 0.0 - -[:, :, 6] = - 0.0 -``` - -You can also use pass an abstract operator to take the horizontal average of any diagnosed quantity. For example, to -compute the horizontal average of the vertical component of vorticity: - -```julia -model = IncompressibleModel(grid=RegularCartesianGrid(size=(16, 16, 16), extent=(1, 1, 1))) -simulation = Simulation(model, Δt=6, stop_iteration=10) - -u, v, w = model.velocities -ζ = ∂x(v) - ∂y(u) -ζ_avg = Average(ζ, dims=(1, 2)) -simulation.diagnostics[:vorticity_profile] = ζ_avg -``` - -See [`Average`](@ref) for more details and options. 
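Not part of the patch, but for orientation: the `Average` diagnostic documented in the deleted section above is superseded by `AveragedField`, which the output-writer docs below now use. A minimal sketch of the equivalent horizontal average, assuming `AveragedField` and `compute!` are provided by `Oceananigans.Fields` as the import changes elsewhere in this diff suggest:

```julia
# Sketch only: mirrors the deleted docs example using the AveragedField API
# referenced in docs/src/model_setup/output_writers.md below. Names are taken
# from this diff and not verified against a specific Oceananigans release.
using Oceananigans
using Oceananigans.Fields: AveragedField, compute!

model = IncompressibleModel(grid=RegularCartesianGrid(size=(4, 4, 4), extent=(1, 1, 1)))

T_avg = AveragedField(model.tracers.T, dims=(1, 2))  # horizontal average of T
compute!(T_avg)  # evaluate the average for the model's current state
```

As with the old `Average`, the result can be handed to an output writer, as in the `output_writers.md` hunk below.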
diff --git a/docs/src/model_setup/output_writers.md b/docs/src/model_setup/output_writers.md index c95794b68e..59c9559efd 100644 --- a/docs/src/model_setup/output_writers.md +++ b/docs/src/model_setup/output_writers.md @@ -28,7 +28,7 @@ simulation = Simulation(model, Δt=12, stop_time=3600); fields = Dict("u" => model.velocities.u, "T" => model.tracers.T); simulation.output_writers[:field_writer] = - NetCDFOutputWriter(model, fields, filename="output_fields.nc", time_interval=60) + NetCDFOutputWriter(model, fields, filepath="output_fields.nc", time_interval=60) # output NetCDFOutputWriter (time_interval=60): output_fields.nc @@ -38,8 +38,8 @@ NetCDFOutputWriter (time_interval=60): output_fields.nc ```jldoctest netcdf1 simulation.output_writers[:surface_slice_writer] = - NetCDFOutputWriter(model, fields, filename="output_surface_xy_slice.nc", - time_interval=60, zC=grid.Nz, zF=grid.Nz+1) + NetCDFOutputWriter(model, fields, filepath="output_surface_xy_slice.nc", + time_interval=60, field_slicer=FieldSlicer(k=grid.Nz)) # output NetCDFOutputWriter (time_interval=60): output_surface_xy_slice.nc @@ -79,7 +79,7 @@ global_attributes = Dict("location" => "Bay of Fundy", "onions" => 7); simulation.output_writers[:stuff] = NetCDFOutputWriter(model, outputs, - iteration_interval=1, filename="stuff.nc", dimensions=dims, verbose=true, + iteration_interval=1, filepath="stuff.nc", dimensions=dims, verbose=true, global_attributes=global_attributes, output_attributes=output_attributes) # output @@ -114,7 +114,7 @@ function init_save_some_metadata(file, model) file["parameters/density"] = 1027 end -T_avg = Average(model.tracers.T, dims=(1, 2)) +T_avg = AveragedField(model.tracers.T, dims=(1, 2)) outputs = Dict( :w => model -> model.velocities.u, diff --git a/docs/src/model_setup/tracers.md b/docs/src/model_setup/tracers.md index e371010b4e..32ff6b5dad 100644 --- a/docs/src/model_setup/tracers.md +++ b/docs/src/model_setup/tracers.md @@ -13,7 +13,7 @@ end julia> grid = RegularCartesianGrid(size=(64, 64, 64), extent=(1, 1, 1)); julia> model = IncompressibleModel(grid=grid) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64) ├── tracers: (:T, :S) ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(:T, :S),Tuple{Float64,Float64}}} @@ -43,7 +43,7 @@ quantities $C_1$, CO₂, and nitrogen as additional passive tracers you could se ```jldoctest tracers julia> model = IncompressibleModel(grid=grid, tracers=(:T, :S, :C₁, :CO₂, :nitrogen)) -IncompressibleModel{CPU, Float64}(time = 0.000 s, iteration = 0) +IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0) ├── grid: RegularCartesianGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64) ├── tracers: (:T, :S, :C₁, :CO₂, :nitrogen) ├── closure: IsotropicDiffusivity{Float64,NamedTuple{(:T, :S, :C₁, :CO₂, :nitrogen),NTuple{5,Float64}}} diff --git a/examples/eady_turbulence.jl b/examples/eady_turbulence.jl index f8e23aa7e1..b6c4e5fdc1 100644 --- a/examples/eady_turbulence.jl +++ b/examples/eady_turbulence.jl @@ -95,10 +95,11 @@ # | $ κ_z $ | Laplacian vertical diffusivity | $ 10^{-2} $ | $ \mathrm{m^2 s^{-1}} $ | # | $ \varkappa_h $ | Biharmonic horizontal diffusivity | $ 10^{-2} \times \Delta x^4 / \mathrm{day} $ | $ \mathrm{m^4 s^{-1}} $ | # -# We start off by importing `Oceananigans` and some convenient aliases for dimensions: +# We start off by importing `Oceananigans`, some 
convenient aliases for dimensions, and a function +# that generates a pretty string from a number that represents 'time' in seconds: using Oceananigans, Printf -using Oceananigans.Utils: hour, day +using Oceananigans.Utils: hour, day, prettytime # ## The grid # diff --git a/examples/langmuir_turbulence.jl b/examples/langmuir_turbulence.jl index a4ba2c5bd3..a5103ffd7d 100644 --- a/examples/langmuir_turbulence.jl +++ b/examples/langmuir_turbulence.jl @@ -170,6 +170,7 @@ nothing # hide # maximum absolute value of $u, v, w$ and the current wall clock time. using Oceananigans.Diagnostics, Printf +using Oceananigans.Utils: prettytime umax = FieldMaximum(abs, model.velocities.u) vmax = FieldMaximum(abs, model.velocities.v) diff --git a/src/AbstractOperations/AbstractOperations.jl b/src/AbstractOperations/AbstractOperations.jl index 3ea81055fd..3ff44cbf6b 100644 --- a/src/AbstractOperations/AbstractOperations.jl +++ b/src/AbstractOperations/AbstractOperations.jl @@ -1,6 +1,6 @@ module AbstractOperations -export ∂x, ∂y, ∂z, @at, Computation, compute!, @unary, @binary, @multiary +export ∂x, ∂y, ∂z, @at, @unary, @binary, @multiary using Base: @propagate_inbounds @@ -20,7 +20,6 @@ using Oceananigans.Fields: gpufriendly using Oceananigans.Operators: interpolation_operator using Oceananigans.Architectures: device using Oceananigans.Models: AbstractModel -using Oceananigans.Diagnostics: Average, normalize_sum! import Oceananigans.Architectures: architecture import Oceananigans.Fields: data, compute! @@ -52,7 +51,6 @@ include("binary_operations.jl") include("multiary_operations.jl") include("derivatives.jl") -include("computations.jl") include("show_abstract_operations.jl") include("averages_of_operations.jl") diff --git a/src/AbstractOperations/computations.jl b/src/AbstractOperations/computations.jl deleted file mode 100644 index a42132df92..0000000000 --- a/src/AbstractOperations/computations.jl +++ /dev/null @@ -1,147 +0,0 @@ -using KernelAbstractions -using Oceananigans.Utils: work_layout -using Oceananigans.Fields -using Oceananigans.Diagnostics: dims_to_result_size -import Oceananigans.Diagnostics: run_diagnostic! - -import Oceananigans.Fields: location, total_size, compute! - -""" - Computation{T, R, O, G} - -Represents an operation performed over the elements of a field. -""" -struct Computation{T, R, O, G} - operation :: O - result :: R - grid :: G - return_type :: T -end - -""" - Computation(operation, result; return_type=Array) - -Returns a `Computation` representing an `operation` performed over the elements of -`operation.grid` and stored in `result`. `return_type` specifies the output type when the -`Computation` instances is called as a function. -""" -Computation(operation, result; return_type=Array) = - Computation(operation, result, operation.grid, return_type) - -# Utilities for limited field-like behavior -architecture(comp::Computation) = architecture(comp.result) -Base.parent(comp::Computation) = comp # this enables taking a "horizontal average" of a computation -location(comp::Computation) = location(comp.operation) -total_size(comp::Computation) = total_size(comp.result) - -""" - compute!(computation::Computation) - -Perform a `computation`. The result is stored in `computation.result`. -""" -function compute!(computation::Computation) - arch = architecture(computation.result) - result_data = data(computation.result) - loc = location(computation.result) - - workgroup, worksize = work_layout(computation.grid, :xyz, include_right_boundaries=true, location=loc) - - compute_kernel! 
= _compute!(device(arch), workgroup, worksize) - - event = compute_kernel!(result_data, computation.grid, computation.operation; dependencies=Event(device(arch))) - - wait(device(arch), event) - - return nothing -end - -"""Compute an `operation` over `grid` and store in `result`.""" -@kernel function _compute!(result, grid, operation) - i, j, k = @index(Global, NTuple) - @inbounds result[i, j, k] = operation[i, j, k] -end - -""" - (computation::Computation)(args...) - -Performs the `compute(computation)` and returns the result if `isnothing(return_type)`, -or the result after being converted to `return_type`. -""" -function (computation::Computation)(args...) - compute!(computation) - return computation.return_type(interior(computation.result)) -end - -function (computation::Computation{<:Nothing})(args...) - compute!(computation) - return computation.result -end - -##### -##### Functionality for using computations with Average -##### - -""" - Average(op::AbstractOperation, result; dims, kwargs...) - -Returns the representation of an `Average` over the operation `op`, using `result` as -a temporary array to store the result of `operation` computed on `op.grid`. -""" -function Average(op::AbstractOperation, result; dims, kwargs...) - computation = Computation(op, result) - return Average(computation; dims=dims, kwargs...) -end - -""" - Average(op::AbstractOperation, model; dims, kwargs...) - -Returns the representation of an `Average` over the operation `op`, using -`model.pressures.pHY′` as a temporary array to store the result of `operation` computed on -`op.grid`. -""" -Average(op::AbstractOperation, model::AbstractModel; dims, kwargs...) = - Average(op, model.pressures.pHY′; dims=dims, kwargs...) - -""" - Average(field::Field; dims, iteration_interval=nothing, time_interval=nothing, return_type=Array) - -Construct an `Average` of `field` along the dimensions specified by the tuple `dims`. - -After the average is computed it will be stored in the `result` property. - -The `Average` can be used as a callable object that computes and returns the average. - -An `iteration_interval` or `time_interval` (or both) can be passed to indicate how often to -run this diagnostic if it is part of `simulation.diagnostics`. `iteration_interval` is a -number of iterations while `time_interval` is a time interval in units of `model.clock.time`. - -A `return_type` can be used to specify the type returned when the `Average` is -used as a callable object. The default `return_type=Array` is useful when running a GPU -model and you want to save the output to disk by passing it to an output writer. -""" -function Average(field::Computation; dims, iteration_interval=nothing, time_interval=nothing, return_type=Array, - with_halos=true) - - dims isa Union{Int, Tuple} || error("Average dims must be an integer or tuple!") - dims isa Int && (dims = tuple(dims)) - - length(dims) == 0 && error("dims is empty! Must average over at least one dimension.") - length(dims) > 3 && error("Models are 3-dimensional. Cannot average over 4+ dimensions.") - all(1 <= d <= 3 for d in dims) || error("Dimensions must be one of 1, 2, 3.") - - arch = architecture(field) - result_size = dims_to_result_size(field, dims, field.grid) - result = zeros(arch, field.grid, result_size...) 
- - return Average(field, dims, result, iteration_interval, time_interval, 0.0, - return_type, with_halos) -end - -"""Compute the average of a computation.""" -function run_diagnostic!(avg::Average{<:Computation}, model) - compute!(avg.field) - zero_halo_regions!(parent(avg.field.result), (Cell, Cell, Cell), model.grid) - sum!(avg.result, parent(avg.field.result)) - normalize_sum!(avg) - return nothing -end diff --git a/src/Diagnostics/Diagnostics.jl b/src/Diagnostics/Diagnostics.jl index f41e80f04f..2a9477d35d 100644 --- a/src/Diagnostics/Diagnostics.jl +++ b/src/Diagnostics/Diagnostics.jl @@ -1,7 +1,7 @@ module Diagnostics export - Average, TimeSeries, FieldMaximum, + TimeSeries, FieldMaximum, CFL, AdvectiveCFL, DiffusiveCFL, NaNChecker, run_diagnostic! @@ -11,7 +11,6 @@ using Oceananigans.Operators using Oceananigans: AbstractDiagnostic include("nan_checker.jl") -include("average.jl") include("time_series.jl") include("field_maximum.jl") include("cfl.jl") diff --git a/src/Diagnostics/average.jl b/src/Diagnostics/average.jl deleted file mode 100644 index 32f2e7621a..0000000000 --- a/src/Diagnostics/average.jl +++ /dev/null @@ -1,100 +0,0 @@ -using Oceananigans.Architectures -using Oceananigans.BoundaryConditions -using Oceananigans.Fields -using Oceananigans.Utils - -using Oceananigans.Grids: topology, total_size, halo_size, interior_parent_indices - -""" - Average{F, R, D, P, I, T} <: AbstractDiagnostic - -A diagnostic for computing the averages of a field along particular dimensions. -""" -mutable struct Average{F, R, D, P, I, T} <: AbstractDiagnostic - field :: F - dims :: D - result :: P - iteration_interval :: I - time_interval :: T - previous :: Float64 - return_type :: R - with_halos :: Bool -end - -function dims_to_result_size(field, dims, grid) - field_size = total_size(parent(field)) - return Tuple(d in dims ? 1 : field_size[d] for d in 1:3) -end - -""" - Average(field::Field; dims, iteration_interval=nothing, time_interval=nothing, return_type=Array) - -Construct an `Average` of `field` along the dimensions specified by the tuple `dims`. - -After the average is computed it will be stored in the `result` property. - -The `Average` can be used as a callable object that computes and returns the average. - -An `iteration_interval` or `time_interval` (or both) can be passed to indicate how often to -run this diagnostic if it is part of `simulation.diagnostics`. `iteration_interval` is a -number of iterations while `time_interval` is a time interval in units of `model.clock.time`. - -A `return_type` can be used to specify the type returned when the `Average` is -used as a callable object. The default `return_type=Array` is useful when running a GPU -model and you want to save the output to disk by passing it to an output writer. -""" -function Average(field::Field; dims, iteration_interval=nothing, time_interval=nothing, return_type=Array, - with_halos=true) - - dims isa Union{Int, Tuple} || error("Average dims must be an integer or tuple!") - dims isa Int && (dims = tuple(dims)) - - length(dims) == 0 && error("dims is empty! Must average over at least one dimension.") - length(dims) > 3 && error("Models are 3-dimensional. Cannot average over 4+ dimensions.") - all(1 <= d <= 3 for d in dims) || error("Dimensions must be one of 1, 2, 3.") - - arch = architecture(field) - result_size = dims_to_result_size(field, dims, field.grid) - result = zeros(arch, field.grid, result_size...) 
- - return Average(field, dims, result, iteration_interval, time_interval, 0.0, - return_type, with_halos) -end - -""" - normalize_sum!(avg) - -Normalize the sum by the number of grid points averaged over to get the average. -""" -function normalize_sum!(avg) - N = size(avg.field.grid) - avg.result ./= prod(N[d] for d in avg.dims) - return nothing -end - -""" - run_diagnostic!(avg::Average, model) - -Compute the horizontal average of `avg.field` and store the result in `avg.result`. -""" -function run_diagnostic!(avg::Average, model) - zero_halo_regions!(parent(avg.field), location(avg.field), avg.field.grid) - sum!(avg.result, parent(avg.field)) - normalize_sum!(avg) - return nothing -end - -function (avg::Average)(model) - run_diagnostic!(avg, model) - result = avg.return_type(avg.result) - - if avg.with_halos - return result - else - loc = location(avg.field) - topo = topology(model.grid) - N, H = size(model.grid), halo_size(model.grid) - inds = (d in avg.dims ? Colon() : interior_parent_indices(loc[d], topo[d], N[d], H[d]) for d in 1:3) - return result[inds...] - end -end diff --git a/src/Fields/abstract_field.jl b/src/Fields/abstract_field.jl index 4951aa25e4..5fce77d263 100644 --- a/src/Fields/abstract_field.jl +++ b/src/Fields/abstract_field.jl @@ -67,7 +67,7 @@ Computes `field.data` if `time != field.status.time`. """ function conditional_compute!(field, time) - if time != field.status.time + if time == zero(time) || time != field.status.time compute!(field) field.status.time = time end diff --git a/src/Oceananigans.jl b/src/Oceananigans.jl index 65f2d914c7..9dc821a779 100644 --- a/src/Oceananigans.jl +++ b/src/Oceananigans.jl @@ -16,7 +16,7 @@ export RegularCartesianGrid, VerticallyStretchedCartesianGrid, # Advection schemes - CenteredSecondOrder, CenteredFourthOrder, WENO5, + CenteredSecondOrder, CenteredFourthOrder, UpwindBiasedThirdOrder, UpwindBiasedFifthOrder, WENO5, # Boundary conditions BoundaryCondition, @@ -44,24 +44,23 @@ export # Surface waves via Craik-Leibovich equations SurfaceWaves, - # Time stepping - time_step!, - TimeStepWizard, update_Δt!, + # Turbulence closures + IsotropicDiffusivity, AnisotropicDiffusivity, + AnisotropicBiharmonicDiffusivity, + ConstantSmagorinsky, AnisotropicMinimumDissipation, # Models IncompressibleModel, NonDimensionalModel, Clock, + # Time stepping + time_step!, TimeStepWizard, + # Simulations Simulation, run!, iteration_limit_exceeded, stop_time_exceeded, wall_time_limit_exceeded, - # Utilities - prettytime, pretty_filesize, - - # Turbulence closures - IsotropicDiffusivity, AnisotropicDiffusivity, - AnisotropicBiharmonicDiffusivity, - ConstantSmagorinsky, AnisotropicMinimumDissipation + # Output writers + FieldSlicer, NetCDFOutputWriter, JLD2OutputWriter, Checkpointer, restore_from_checkpoint # Standard library modules using Printf diff --git a/src/OutputWriters/OutputWriters.jl b/src/OutputWriters/OutputWriters.jl index 511f190d1e..3fbb121a0c 100644 --- a/src/OutputWriters/OutputWriters.jl +++ b/src/OutputWriters/OutputWriters.jl @@ -4,7 +4,7 @@ export write_output!, FieldSlicer, JLD2OutputWriter, - NetCDFOutputWriter, write_grid_and_attributes, + NetCDFOutputWriter, Checkpointer, restore_from_checkpoint, WindowedTimeAverage diff --git a/src/OutputWriters/field_slicer.jl b/src/OutputWriters/field_slicer.jl index 137297b4e3..e078ee9ccd 100644 --- a/src/OutputWriters/field_slicer.jl +++ b/src/OutputWriters/field_slicer.jl @@ -3,11 +3,11 @@ Slices fields along indices with or without halo regions as specified. 
""" -struct FieldSlicer{I, J, K, W} +struct FieldSlicer{I, J, K} i :: I j :: J k :: K - with_halos :: W + with_halos :: Bool end """ diff --git a/src/OutputWriters/jld2_output_writer.jl b/src/OutputWriters/jld2_output_writer.jl index 1d206cc20c..0c93268ca8 100644 --- a/src/OutputWriters/jld2_output_writer.jl +++ b/src/OutputWriters/jld2_output_writer.jl @@ -33,13 +33,14 @@ noinit(args...) = nothing time_interval = nothing, time_averaging_window = nothing, time_averaging_stride = 1, + field_slicer = FieldSlicer(), + array_type = Array{Float32}, max_filesize = Inf, force = false, init = noinit, including = [:grid, :coriolis, :buoyancy, :closure], verbose = false, part = 1, - array_type = Array{Float32}, jld2_kw = Dict{Symbol, Any}()) Construct a `JLD2OutputWriter` for an Oceananigans `model` that writes `label, output` pairs diff --git a/src/OutputWriters/netcdf_output_writer.jl b/src/OutputWriters/netcdf_output_writer.jl index 3c6a74c6ef..a4f72376a7 100644 --- a/src/OutputWriters/netcdf_output_writer.jl +++ b/src/OutputWriters/netcdf_output_writer.jl @@ -1,16 +1,29 @@ -using Dates: now using NCDatasets using Oceananigans.Fields -using Oceananigans.Fields: cpudata -using Oceananigans.Diagnostics: Average + +using Dates: now +using Oceananigans.Grids: topology, halo_size using Oceananigans.Utils: validate_intervals, versioninfo_with_gpu, oceananigans_versioninfo -using Oceananigans.Grids: topology, interior_x_indices, interior_y_indices, interior_z_indices, - all_x_indices, all_y_indices, all_z_indices -# Possibly should -collect_face_nodes(topo, ξF) = collect(ξF)[1:end-1] -collect_face_nodes(::Bounded, ξF) = collect(ξF) +xdim(::Type{Face}) = "xF" +ydim(::Type{Face}) = "yF" +zdim(::Type{Face}) = "zF" + +xdim(::Type{Cell}) = "xC" +ydim(::Type{Cell}) = "yC" +zdim(::Type{Cell}) = "zC" + +netcdf_spatial_dimensions(::Field{LX, LY, LZ}) where {LX, LY, LZ} = xdim(LX), ydim(LY), zdim(LZ) + +const default_dimension_attributes = Dict( + "xC" => Dict("longname" => "Locations of the cell centers in the x-direction.", "units" => "m"), + "xF" => Dict("longname" => "Locations of the cell faces in the x-direction.", "units" => "m"), + "yC" => Dict("longname" => "Locations of the cell centers in the y-direction.", "units" => "m"), + "yF" => Dict("longname" => "Locations of the cell faces in the y-direction.", "units" => "m"), + "zC" => Dict("longname" => "Locations of the cell centers in the z-direction.", "units" => "m"), + "zF" => Dict("longname" => "Locations of the cell faces in the z-direction.", "units" => "m") +) const default_output_attributes = Dict( "u" => Dict("longname" => "Velocity in the x-direction", "units" => "m/s"), @@ -26,36 +39,63 @@ const default_output_attributes = Dict( An output writer for writing to NetCDF files. """ -mutable struct NetCDFOutputWriter{D, O, I, T, S} <: AbstractOutputWriter - filename :: String +mutable struct NetCDFOutputWriter{D, O, I, T, S, A} <: AbstractOutputWriter + filepath :: String dataset :: D outputs :: O iteration_interval :: I time_interval :: T mode :: String - slices :: S + field_slicer :: S + array_type :: A previous :: Float64 verbose :: Bool end """ - NetCDFOutputWriter(model, outputs; filename, iteration_interval=nothing, time_interval=nothing, - global_attributes=Dict(), output_attributes=Dict(), dimensions=Dict(), - mode="c", compression=0, with_halos=false, verbose=false, slice_kwargs...) 
+function NetCDFOutputWriter(model, outputs; filepath, + iteration_interval = nothing, + time_interval = nothing, + time_averaging_window = nothing, + time_averaging_stride = 1, + array_type = Array{Float32}, + field_slicer = FieldSlicer(), + global_attributes = Dict(), + output_attributes = Dict(), + dimensions = Dict(), + mode = "c", + compression = 0, + verbose = false) Construct a `NetCDFOutputWriter` that writes `(label, output)` pairs in `outputs` (which should be a `Dict`) to a NetCDF file, where `label` is a string that labels the output and `output` is -either a field from the model (e.g. `model.velocities.u`) or a function `f(model)` that returns -something to be written to disk. Custom output requires the spatial `dimensions` to be manually -specified. +either a `Field` (e.g. `model.velocities.u` or an `AveragedField`) or a function `f(model)` that +returns something to be written to disk. Custom output requires the spatial `dimensions` (a +`Dict`) to be manually specified (see examples). Keyword arguments ================= +- `filepath` (required): Filepath to save output to. + - `iteration_interval`: Save output every `n` model iterations. - `time_interval`: Save output every `t` units of model clock time. -- `filename`: Filepath to save output to. +- `time_averaging_window`: Specifies a time window over which each member of `output` is averaged before + being saved. For this each member of output is converted to `Oceananigans.Diagnostics.WindowedTimeAverage`. + Default `nothing` indicates no averaging. + +- `time_averaging_stride`: Specifies a iteration 'stride' between the calculation of each `output` during + time-averaging. Longer strides means that output is calculated less frequently, and that the resulting + time-average is faster to compute, but less accurate. Default: 1. + +- `array_type`: The array type to which output arrays are converted to prior to saving. + Default: Array{Float32}. + +- `field_slicer`: An object for slicing field output in ``(x, y, z)``, including omitting halos. + Has no effect on output that is not a field. `field_slicer = nothing` means + no slicing occurs, so that all field data, including halo regions, is saved. + Default: FieldSlicer(), which slices halo regions. 
- `global_attributes`: Dict of model properties to save with every file (deafult: `Dict()`) @@ -96,7 +136,7 @@ simulation = Simulation(model, Δt=12, stop_time=3600); fields = Dict("u" => model.velocities.u, "T" => model.tracers.T); simulation.output_writers[:field_writer] = - NetCDFOutputWriter(model, fields, filename="fields.nc", time_interval=60) + NetCDFOutputWriter(model, fields, filepath="fields.nc", time_interval=60) # output NetCDFOutputWriter (time_interval=60): fields.nc @@ -106,8 +146,8 @@ NetCDFOutputWriter (time_interval=60): fields.nc ```jldoctest netcdf1 simulation.output_writers[:surface_slice_writer] = - NetCDFOutputWriter(model, fields, filename="surface_xy_slice.nc", - time_interval=60, zC=grid.Nz, zF=grid.Nz+1) + NetCDFOutputWriter(model, fields, filepath="surface_xy_slice.nc", + time_interval=60, field_slicer=FieldSlicer(k=grid.Nz)) # output NetCDFOutputWriter (time_interval=60): surface_xy_slice.nc @@ -147,7 +187,7 @@ global_attributes = Dict("location" => "Bay of Fundy", "onions" => 7); simulation.output_writers[:things] = NetCDFOutputWriter(model, outputs, - iteration_interval=1, filename="things.nc", dimensions=dims, verbose=true, + iteration_interval=1, filepath="things.nc", dimensions=dims, verbose=true, global_attributes=global_attributes, output_attributes=output_attributes) # output @@ -156,94 +196,118 @@ NetCDFOutputWriter (iteration_interval=1): things.nc └── 3 outputs: ["profile", "slice", "scalar"] ``` """ -function NetCDFOutputWriter(model, outputs; filename, - iteration_interval = nothing, - time_interval = nothing, - global_attributes = Dict(), - output_attributes = Dict(), - dimensions = Dict(), - mode = "c", - compression = 0, - with_halos = false, - verbose = false, - xC = with_halos ? all_x_indices(Cell, model.grid) : interior_x_indices(Cell, model.grid), - xF = with_halos ? all_x_indices(Face, model.grid) : interior_x_indices(Face, model.grid), - yC = with_halos ? all_y_indices(Cell, model.grid) : interior_y_indices(Cell, model.grid), - yF = with_halos ? all_y_indices(Face, model.grid) : interior_y_indices(Face, model.grid), - zC = with_halos ? all_z_indices(Cell, model.grid) : interior_z_indices(Cell, model.grid), - zF = with_halos ? all_z_indices(Face, model.grid) : interior_z_indices(Face, model.grid) - ) +function NetCDFOutputWriter(model, outputs; filepath, + iteration_interval = nothing, + time_interval = nothing, + time_averaging_window = nothing, + time_averaging_stride = 1, + array_type = Array{Float32}, + field_slicer = FieldSlicer(), + global_attributes = Dict(), + output_attributes = Dict(), + dimensions = Dict(), + mode = "c", + compression = 0, + verbose = false) validate_intervals(iteration_interval, time_interval) - # Ensure we can add metadata to the global attributes later by converting to pairs of type {Any, Any}. - global_attributes = Dict{Any, Any}(k => v for (k, v) in global_attributes) + # Convert each output to WindowedTimeAverage if time_averaging_window is specified + if !isnothing(time_averaging_window) - # Generates a dictionary with keys "xC", "xF", etc, whose values give the slices to be saved. - slice_keywords = Dict(name => a for (name, a) in zip(("xC", "yC", "zC", "xF", "yF", "zF"), - ( xC, yC, zC, xF, yF, zF ))) + !isnothing(iteration_interval) && error("Cannot specify iteration_interval with time_averaging_window.") - # Allow values to be of Any type as OffsetArrays may get modified below. - grid = model.grid - dims = Dict{String,Any}( - "xC" => with_halos ? 
grid.xC : collect(xnodes(Cell, grid)), - "xF" => with_halos ? grid.xF : collect(xnodes(Face, grid)), - "yC" => with_halos ? grid.yC : collect(ynodes(Cell, grid)), - "yF" => with_halos ? grid.yF : collect(ynodes(Face, grid)), - "zC" => with_halos ? grid.zC : collect(znodes(Cell, grid)), - "zF" => with_halos ? grid.zF : collect(znodes(Face, grid)) - ) + outputs = Dict(name => WindowedTimeAverage(outputs[name], model, time_interval = time_interval, + time_window = time_averaging_window, stride = time_averaging_stride, + field_slicer = field_slicer) + for name in keys(outputs)) + end - dim_attribs = Dict( - "xC" => Dict("longname" => "Locations of the cell centers in the x-direction.", "units" => "m"), - "xF" => Dict("longname" => "Locations of the cell faces in the x-direction.", "units" => "m"), - "yC" => Dict("longname" => "Locations of the cell centers in the y-direction.", "units" => "m"), - "yF" => Dict("longname" => "Locations of the cell faces in the y-direction.", "units" => "m"), - "zC" => Dict("longname" => "Locations of the cell centers in the z-direction.", "units" => "m"), - "zF" => Dict("longname" => "Locations of the cell faces in the z-direction.", "units" => "m") - ) + # Ensure we can add any kind of metadata to the global attributes later by converting to pairs of type {Any, Any}. + global_attributes = Dict{Any, Any}(k => v for (k, v) in global_attributes) - # Add useful metadata as global attributes + # Add useful metadata global_attributes["date"] = "This file was generated on $(now())." global_attributes["Julia"] = "This file was generated using " * versioninfo_with_gpu() global_attributes["Oceananigans"] = "This file was generated using " * oceananigans_versioninfo() - # Slice coordinate arrays stored in the dims dict - for (dim, indices) in slice_keywords - dim = string(dim) # convert symbol to string - dims[dim] = dims[dim][get_slice(indices)] # overwrite entries in dims Dict + # Add useful metadata about intervals and time averaging + if isnothing(time_averaging_window) + if !isnothing(iteration_interval) + global_attributes["iteration_interval"] = iteration_interval + global_attributes["output iteration interval"] = + "Output was saved every $iteration_interval iteration(s)." + end + + if !isnothing(time_interval) + global_attributes["time_interval"] = time_interval + global_attributes["output time interval"] = + "Output was saved every $(prettytime(time_interval))." + end + else + global_attributes["time_interval"] = time_interval + global_attributes["output time interval"] = + "Output was time averaged and saved every $(prettytime(time_interval))." + + global_attributes["time_averaging_window"] = time_averaging_window + global_attributes["time averaging window"] = + "Output was time averaged with a window size of $(prettytime(time_averaging_window))" + + if time_averaging_stride != 1 + global_attributes["time_averaging_stride"] = time_averaging_stride + global_attributes["time averaging stride"] = + "Output was time averaged with a stride of $time_averaging_stride iteration(s) within the time averaging window." 
+ end end + grid = model.grid + Nx, Ny, Nz = size(grid) + Hx, Hy, Hz = halo_size(grid) + TX, TY, TZ = topology(grid) + + dims = Dict( + "xC" => grid.xC.parent[parent_slice_indices(Cell, TX, Nx, Hx, field_slicer.i, field_slicer.with_halos)], + "xF" => grid.xF.parent[parent_slice_indices(Face, TX, Nx, Hx, field_slicer.i, field_slicer.with_halos)], + "yC" => grid.yC.parent[parent_slice_indices(Cell, TY, Ny, Hy, field_slicer.j, field_slicer.with_halos)], + "yF" => grid.yF.parent[parent_slice_indices(Face, TY, Ny, Hy, field_slicer.j, field_slicer.with_halos)], + "zC" => grid.zC.parent[parent_slice_indices(Cell, TZ, Nz, Hz, field_slicer.k, field_slicer.with_halos)], + "zF" => grid.zF.parent[parent_slice_indices(Face, TZ, Nz, Hz, field_slicer.k, field_slicer.with_halos)] + ) + # Open the NetCDF dataset file - dataset = Dataset(filename, mode, attrib=global_attributes) + dataset = Dataset(filepath, mode, attrib=global_attributes) # Define variables for each dimension and attributes if this is a new file. if mode == "c" for (dim_name, dim_array) in dims defVar(dataset, dim_name, dim_array, (dim_name,), - compression=compression, attrib=dim_attribs[dim_name]) + compression=compression, attrib=default_dimension_attributes[dim_name]) end - # Creates an unliimited dimension "time" + # Creates an unlimited dimension "time" defDim(dataset, "time", Inf) - defVar(dataset, "time", Float64, ("time",)) + defVar(dataset, "time", typeof(model.clock.time), ("time",)) - # Ensure we have an attribute for every output. Use reasonable defaults if - # none were specified by the user. + # Use default output attributes for known outputs if the user has not specified any. + # Unknown outputs get an empty tuple (no output attributes). for c in keys(outputs) if !haskey(output_attributes, c) - output_attributes[c] = default_output_attributes[c] + output_attributes[c] = c in keys(default_output_attributes) ? default_output_attributes[c] : () end end - # Initiates empty variables for fields from the user-supplied variable outputs + # Initiates empty variables for fields from the user-supplied `outputs`. for (name, output) in outputs if output isa Field - FT = eltype(output.grid) - defVar(dataset, name, FT, (netcdf_spatial_dimensions(output)..., "time"), + defVar(dataset, name, eltype(array_type), (netcdf_spatial_dimensions(output)..., "time"), + compression=compression, attrib=output_attributes[name]) + elseif output isa WindowedTimeAverage && output.operand isa Field + defVar(dataset, name, eltype(array_type), (netcdf_spatial_dimensions(output.operand)..., "time"), compression=compression, attrib=output_attributes[name]) else - defVar(dataset, name, Float64, (dimensions[name]..., "time"), + name ∉ keys(dimensions) && error("Custom output $name needs dimensions!") + + defVar(dataset, name, eltype(array_type), (dimensions[name]..., "time"), compression=compression, attrib=output_attributes[name]) end end @@ -251,82 +315,21 @@ function NetCDFOutputWriter(model, outputs; filename, sync(dataset) end - # extract outputs whose values are Fields - field_outputs = filter(o -> o.second isa Field, outputs) - - # Store a slice specification for each field. 
- slices = Dict(name => slice_indices(field; xC=xC, yC=yC, zC=zC, xF=xF, yF=yF, zF=zF) - for (name, field) in field_outputs) - - return NetCDFOutputWriter(filename, dataset, outputs, iteration_interval, time_interval, - mode, slices, 0.0, verbose) + return NetCDFOutputWriter(filepath, dataset, outputs, iteration_interval, time_interval, + mode, field_slicer, array_type, 0.0, verbose) end -Base.open(ow::NetCDFOutputWriter) = Dataset(ow.filename, "a") +Base.open(ow::NetCDFOutputWriter) = Dataset(ow.filepath, "a") Base.close(ow::NetCDFOutputWriter) = close(ow.dataset) -""" - netcdf_spatial_dimensions(::field) - -Returns the NetCDF dimensions associated with a field. - -Examples -======== -julia> netcdf_spatial_dimensions(model.velocities.u) -("xF", "yC", "zC") - -julia> netcdf_spatial_dimensions(model.tracers.T) -("xC", "yC", "zC") -""" -netcdf_spatial_dimensions(::Field{LX, LY, LZ}) where {LX, LY, LZ} = xdim(LX), ydim(LY), zdim(LZ) - -xdim(::Type{Face}) = "xF" -ydim(::Type{Face}) = "yF" -zdim(::Type{Face}) = "zF" - -xdim(::Type{Cell}) = "xC" -ydim(::Type{Cell}) = "yC" -zdim(::Type{Cell}) = "zC" - -# This function allows users to specify slices with integers; eg xC=3. -# Note: size(a[3:3, :, :]) = (1, Ny, Nz) versus size(a[3, :, :]) = (Ny, Nz) -get_slice(n::Integer) = n:n -get_slice(n::UnitRange) = n - -""" - slice_indices(field; slice_specs...) - -Returns an array of indices that specify a view over a field's data. -""" -slice_indices(field; slice_specs...) = - [get_slice(slice_specs[Symbol(dim)]) for dim in netcdf_spatial_dimensions(field)] - -function save_output_to_netcdf!(nc, model, f::Field, name, time_index) - data = cpudata(f) # Transfer data to CPU if parent(output) is a CuArray - nc.dataset[name][:, :, :, time_index] = view(data, nc.slices[name]...) -end - -function save_output_to_netcdf!(nc, model, avg::Average, name, time_index) - data = avg(model) - data = dropdims(data, dims=avg.dims) - colons = Tuple(Colon() for _ in 1:ndims(data)) - nc.dataset[name][colons..., time_index] = data -end - -function save_output_to_netcdf!(nc, model, output, name, time_index) - data = output(model) - colons = Tuple(Colon() for _ in 1:ndims(data)) - nc.dataset[name][colons..., time_index] = data -end - """ write_output!(output_writer, model) -Writes output to netcdf file `output_writer.filename` at specified intervals. Increments the `time` dimension +Writes output to netcdf file `output_writer.filepath` at specified intervals. Increments the `time` dimension every time an output is written to the file. """ function write_output!(ow::NetCDFOutputWriter, model) - ds, verbose, filepath = ow.dataset, ow.verbose, ow.filename + ds, verbose, filepath = ow.dataset, ow.verbose, ow.filepath time_index = length(ds["time"]) + 1 ds["time"][time_index] = model.clock.time @@ -343,7 +346,16 @@ function write_output!(ow::NetCDFOutputWriter, model) # Time before computing this output. verbose && (t0′ = time_ns()) - save_output_to_netcdf!(ow, model, output, name, time_index) + data = fetch_and_convert_output(output, model, ow) + + if output isa AveragedField + data = dropdims(data, dims=output.dims) + elseif output isa WindowedTimeAverage && output.operand isa AveragedField + data = dropdims(data, dims=output.operand.dims) + end + + colons = Tuple(Colon() for _ in 1:ndims(data)) + ds[name][colons..., time_index] = data if verbose # Time after computing this output. 
@@ -375,7 +387,7 @@ function Base.show(io::IO, ow::NetCDFOutputWriter) dims = join([dim * "(" * string(length(ow.dataset[dim])) * "), " for dim in keys(ow.dataset.dim)])[1:end-2] - print(io, "NetCDFOutputWriter $freq_int: $(ow.filename)\n", + print(io, "NetCDFOutputWriter $freq_int: $(ow.filepath)\n", "├── dimensions: $dims\n", "└── $(length(ow.outputs)) outputs: $(keys(ow.outputs))") end diff --git a/src/OutputWriters/windowed_time_average.jl b/src/OutputWriters/windowed_time_average.jl index 828e61e3dd..3acab11794 100644 --- a/src/OutputWriters/windowed_time_average.jl +++ b/src/OutputWriters/windowed_time_average.jl @@ -35,8 +35,7 @@ Returns an object for computing running averages of `operand` over `time_window` recurring on `time_interval`. During the collection period, averages are computed every `stride` iteration. -`operand` may be an `Oceananigans.Field`, `Oceananigans.AbstractOperations.Computation, -or `Oceananigans.Diagnostics.Average`. +`operand` may be a `Oceananigans.Field` or a subtype of it. Calling `wta(model)` for `wta::WindowedTimeAverage` object returns `wta.result`. """ diff --git a/src/Utils/pretty_time.jl b/src/Utils/pretty_time.jl index b34904751c..af609ec917 100644 --- a/src/Utils/pretty_time.jl +++ b/src/Utils/pretty_time.jl @@ -1,35 +1,53 @@ using Printf using Dates: AbstractTime +maybe_int(t) = isinteger(t) ? Int(t) : t + """ prettytime(t) Convert a floating point value `t` representing an amount of time in seconds to a more human-friendly formatted string with three decimal places. Depending on the value of `t` the string will be formatted to show `t` in nanoseconds (ns), microseconds (μs), -milliseconds (ms), seconds (s), minutes (min), hours (hr), or days (day). +milliseconds (ms), seconds, minutes, hours, days, or years. """ function prettytime(t) # Modified from: https://github.com/JuliaCI/BenchmarkTools.jl/blob/master/src/trials.jl - iszero(t) && return "0.000 s" + iszero(t) && return "0 seconds" + + t = maybe_int(t) - if t < 1e-6 + if t < 1e-9 + # No point going to picoseconds, just return something readable. + return @sprintf("%.3e seconds", t) + elseif t < 1e-6 value, units = t * 1e9, "ns" elseif t < 1e-3 value, units = t * 1e6, "μs" elseif t < 1 value, units = t * 1e3, "ms" elseif t < minute - value, units = t, "s" + value = t + units = value == 1 ? "second" : "seconds" elseif t < hour - value, units = t / minute, "min" + value = maybe_int(t / minute) + units = value == 1 ? "minute" : "minutes" elseif t < day - value, units = t / hour, "hr" + value = maybe_int(t / hour) + units = value == 1 ? "hour" : "hours" + elseif t < year + value = maybe_int(t / day) + units = value == 1 ? "day" : "days" else - value, units = t / day, "day" + value = maybe_int(t / year) + units = value == 1 ? "year" : "years" end - return @sprintf("%.3f", value) * " " * units + if isinteger(value) + return @sprintf("%d %s", value, units) + else + return @sprintf("%.3f %s", value, units) + end end prettytime(dt::AbstractTime) = "$dt" diff --git a/test/runtests.jl b/test/runtests.jl index a5b7f332ba..ea2998b537 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -42,8 +42,6 @@ using KernelAbstractions: @kernel, @index, Event import Oceananigans.Fields: interior import Oceananigans.Utils: launch!, datatuple -using Oceananigans.AbstractOperations: Computation, compute! 
- Logging.global_logger(OceananigansLogger()) ##### @@ -91,6 +89,7 @@ group = get(ENV, "TEST_GROUP", :all) |> Symbol include("test_buoyancy.jl") include("test_surface_waves.jl") include("test_weno_reconstruction.jl") + include("test_utils.jl") end end diff --git a/test/test_diagnostics.jl b/test/test_diagnostics.jl index 9731222cc4..14eace41cd 100644 --- a/test/test_diagnostics.jl +++ b/test/test_diagnostics.jl @@ -1,98 +1,3 @@ -using Oceananigans.Grids: halo_size - -function run_horizontal_average_tests(arch, FT) - topo = (Periodic, Periodic, Bounded) - Nx = Ny = Nz = 4 - grid = RegularCartesianGrid(topology=topo, size=(Nx, Ny, Nz), extent=(100, 100, 100)) - Hx, Hy, Hz = halo_size(grid) - - model = IncompressibleModel(grid=grid, architecture=arch, float_type=FT) - - linear(x, y, z) = z - set!(model, T=linear, w=linear) - - T̅ = Average(model.tracers.T, dims=(1, 2), with_halos=false) - computed_T_profile = T̅(model) - @test_skip size(computed_T_profile) == (1, 1, Nz) - @test_skip computed_T_profile ≈ znodes(Cell, grid, reshape=true) - - T̅ = Average(model.tracers.T, dims=(1, 2), with_halos=true) - computed_T_profile_with_halos = T̅(model) - @test_skip size(computed_T_profile_with_halos) == (1, 1, Nz+2Hz) - @test_skip computed_T_profile_with_halos[1+Hz:Nz+Hz] ≈ znodes(Cell, grid) - - w̅ = Average(model.velocities.w, dims=(1, 2), with_halos=false) - computed_w_profile = w̅(model) - @test_skip size(computed_w_profile) == (1, 1, Nz+1) - @test_skip computed_w_profile ≈ znodes(Face, grid, reshape=true) - - w̅ = Average(model.velocities.w, dims=(1, 2), with_halos=true) - computed_w_profile_with_halos = w̅(model) - @test_skip size(computed_w_profile_with_halos) == (1, 1, Nz+1+2Hz) - @test_skip computed_w_profile_with_halos[1+Hz:Nz+1+Hz] ≈ znodes(Face, grid) -end - -function run_zonal_average_tests(arch, FT) - topo = (Periodic, Bounded, Bounded) - Nx = Ny = Nz = 4 - grid = RegularCartesianGrid(topology=topo, size=(Nx, Ny, Nz), extent=(100, 100, 100)) - Hx, Hy, Hz = halo_size(grid) - - model = IncompressibleModel(grid=grid, architecture=arch, float_type=FT) - - linear(x, y, z) = z - set!(model, T=linear, v=linear) - - T̅ = Average(model.tracers.T, dims=1, with_halos=false) - computed_T_slice = T̅(model) - @test_skip size(computed_T_slice) == (1, Ny, Nz) - - computed_T_slice = dropdims(computed_T_slice, dims=1) - zC = znodes(Cell, grid) - @test_skip all(computed_T_slice[j, :] ≈ zC for j in 1:Ny) - - T̅ = Average(model.tracers.T, dims=1, with_halos=true) - computed_T_slice_with_halos = T̅(model) - @test_skip size(computed_T_slice_with_halos) == (1, Ny+2Hy, Nz+2Hz) - - computed_T_slice_with_halos = dropdims(computed_T_slice_with_halos, dims=1) - @test_skip computed_T_slice_with_halos[1+Hy:Ny+Hy, 1+Hz:Nz+Hz] ≈ computed_T_slice - - v̅ = Average(model.velocities.v, dims=1, with_halos=false) - computed_v_slice = v̅(model) - @test_skip size(computed_v_slice) == (1, Ny+1, Nz) - - computed_v_slice = dropdims(computed_v_slice, dims=1) - zC = znodes(Cell, grid) - @test_skip all(computed_v_slice[j, :] ≈ zC for j in 1:Ny) - - v̅ = Average(model.velocities.v, dims=1, with_halos=true) - computed_v_slice_with_halos = v̅(model) - @test_skip size(computed_v_slice_with_halos) == (1, Ny+1+2Hy, Nz+2Hz) - - computed_v_slice_with_halos = dropdims(computed_v_slice_with_halos, dims=1) - @test_skip computed_v_slice_with_halos[1+Hy:Ny+1+Hy, 1+Hz:Nz+Hz] ≈ computed_v_slice -end - -function run_volume_average_tests(arch, FT) - Nx = Ny = Nz = 4 - grid = RegularCartesianGrid(size=(Nx, Ny, Nz), extent=(100, 100, 100)) - model = 
IncompressibleModel(grid=grid, architecture=arch, float_type=FT) - - T₀(x, y, z) = z - set!(model, T=T₀) - - T̅ = Average(model.tracers.T, dims=(1, 2, 3), time_interval=0.5second, with_halos=false) - computed_scalar = T̅(model) - @test_skip size(computed_scalar) == (1, 1, 1) - @test_skip all(computed_scalar .≈ -50.0) - - T̅ = Average(model.tracers.T, dims=(1, 2, 3), time_interval=0.5second, with_halos=true) - computed_scalar_with_halos = T̅(model) - @test_skip size(computed_scalar_with_halos) == (1, 1, 1) - @test_skip all(computed_scalar_with_halos .≈ -50.0) -end - function nan_checker_aborts_simulation(arch, FT) grid = RegularCartesianGrid(size=(4, 2, 1), extent=(1, 1, 1)) model = IncompressibleModel(grid=grid, architecture=arch, float_type=FT) @@ -205,17 +110,6 @@ end @testset "Diagnostics" begin @info "Testing diagnostics..." - for arch in archs - @testset "Average [$(typeof(arch))]" begin - @info " Testing averages [$(typeof(arch))]" - for FT in float_types - run_horizontal_average_tests(arch, FT) - run_zonal_average_tests(arch, FT) - run_volume_average_tests(arch, FT) - end - end - end - for arch in archs @testset "NaN Checker [$(typeof(arch))]" begin @info " Testing NaN Checker [$(typeof(arch))]" diff --git a/test/test_output_writers.jl b/test/test_output_writers.jl index 96dcc5d150..df6eaa346f 100644 --- a/test/test_output_writers.jl +++ b/test/test_output_writers.jl @@ -4,11 +4,46 @@ using Oceananigans.BoundaryConditions: BoundaryFunction, PBC, FBC, ZFBC using Oceananigans.Diagnostics using Oceananigans.Fields +function instantiate_windowed_time_average(model) + + set!(model, u = (x, y, z) -> rand()) + + u, v, w = model.velocities + + u₀ = similar(interior(u)) + u₀ .= interior(u) + + wta = WindowedTimeAverage(model.velocities.u, time_window=1.0, time_interval=10.0) + + return all(wta(model) .== u₀) +end + +function time_step_with_windowed_time_average(model) + + model.clock.iteration = 0 + model.clock.time = 0.0 + + set!(model, u=0, v=0, w=0, T=0, S=0) + + wta = WindowedTimeAverage(model.velocities.u, time_window=2.0, time_interval=4.0) + + simulation = Simulation(model, Δt=1.0, stop_time=4.0) + simulation.diagnostics[:u_avg] = wta + run!(simulation) + + return all(wta(model) .== interior(model.velocities.u)) +end + +##### +##### NetCDFOutputWriter tests +##### + function run_thermal_bubble_netcdf_tests(arch) Nx, Ny, Nz = 16, 16, 16 Lx, Ly, Lz = 100, 100, 100 - grid = RegularCartesianGrid(size=(Nx, Ny, Nz), extent=(Lx, Ly, Lz)) + topo = (Periodic, Periodic, Bounded) + grid = RegularCartesianGrid(topology=topo, size=(Nx, Ny, Nz), extent=(Lx, Ly, Lz)) closure = IsotropicDiffusivity(ν=4e-2, κ=4e-2) model = IncompressibleModel(architecture=arch, grid=grid, closure=closure) simulation = Simulation(model, Δt=6, stop_iteration=10) @@ -26,32 +61,40 @@ function run_thermal_bubble_netcdf_tests(arch) "T" => model.tracers.T, "S" => model.tracers.S) - nc_filename = "test_dump_$(typeof(arch)).nc" - nc_writer = NetCDFOutputWriter(model, outputs, filename=nc_filename, iteration_interval=10, verbose=true) + nc_filepath = "test_dump_$(typeof(arch)).nc" + nc_writer = NetCDFOutputWriter(model, outputs, filepath=nc_filepath, iteration_interval=10, verbose=true) push!(simulation.output_writers, nc_writer) - xC_slice = 1:10 - xF_slice = 2:11 - yC_slice = 10:15 - yF_slice = 1 - zC_slice = 10 - zF_slice = 9:11 + i_slice = 1:10 + j_slice = 13 + k_slice = 9:11 + field_slicer = FieldSlicer(i=i_slice, j=j_slice, k=k_slice) + j_slice = j_slice:j_slice # So we can correctly index with it for later tests. 
- nc_sliced_filename = "test_dump_sliced_$(typeof(arch)).nc" - nc_sliced_writer = - NetCDFOutputWriter(model, outputs, filename=nc_sliced_filename, iteration_interval=10, verbose=true, - xC=xC_slice, xF=xF_slice, yC=yC_slice, - yF=yF_slice, zC=zC_slice, zF=zF_slice) + nc_sliced_filepath = "test_dump_sliced_$(typeof(arch)).nc" + nc_sliced_writer = NetCDFOutputWriter(model, outputs, filepath=nc_sliced_filepath, iteration_interval=10, + field_slicer=field_slicer, verbose=true) push!(simulation.output_writers, nc_sliced_writer) run!(simulation) - ds3 = Dataset(nc_filename) + ds3 = Dataset(nc_filepath) @test haskey(ds3.attrib, "date") && !isnothing(ds3.attrib["date"]) @test haskey(ds3.attrib, "Julia") && !isnothing(ds3.attrib["Julia"]) @test haskey(ds3.attrib, "Oceananigans") && !isnothing(ds3.attrib["Oceananigans"]) + @test haskey(ds3.attrib, "iteration_interval") && ds3.attrib["iteration_interval"] == 10 + @test haskey(ds3.attrib, "output iteration interval") && !isnothing(ds3.attrib["output iteration interval"]) + + @test eltype(ds3["time"]) == eltype(model.clock.time) + + @test eltype(ds3["xC"]) == Float64 + @test eltype(ds3["xF"]) == Float64 + @test eltype(ds3["yC"]) == Float64 + @test eltype(ds3["yF"]) == Float64 + @test eltype(ds3["zC"]) == Float64 + @test eltype(ds3["zF"]) == Float64 @test length(ds3["xC"]) == Nx @test length(ds3["yC"]) == Ny @@ -74,6 +117,12 @@ function run_thermal_bubble_netcdf_tests(arch) @test ds3["zC"][end] == grid.zC[Nz] @test ds3["zF"][end] == grid.zF[Nz+1] # z is Bounded + @test eltype(ds3["u"]) == Float32 + @test eltype(ds3["v"]) == Float32 + @test eltype(ds3["w"]) == Float32 + @test eltype(ds3["T"]) == Float32 + @test eltype(ds3["S"]) == Float32 + u = ds3["u"][:, :, :, end] v = ds3["v"][:, :, :, end] w = ds3["w"][:, :, :, end] @@ -88,32 +137,49 @@ function run_thermal_bubble_netcdf_tests(arch) @test all(T .≈ Array(interiorparent(model.tracers.T))) @test all(S .≈ Array(interiorparent(model.tracers.S))) - ds2 = Dataset(nc_sliced_filename) + ds2 = Dataset(nc_sliced_filepath) @test haskey(ds2.attrib, "date") && !isnothing(ds2.attrib["date"]) @test haskey(ds2.attrib, "Julia") && !isnothing(ds2.attrib["Julia"]) @test haskey(ds2.attrib, "Oceananigans") && !isnothing(ds2.attrib["Oceananigans"]) - - @test length(ds2["xC"]) == length(xC_slice) - @test length(ds2["xF"]) == length(xF_slice) - @test length(ds2["yC"]) == length(yC_slice) - @test length(ds2["yF"]) == length(yF_slice) - @test length(ds2["zC"]) == length(zC_slice) - @test length(ds2["zF"]) == length(zF_slice) - - @test ds2["xC"][1] == grid.xC[xC_slice[1]] - @test ds2["xF"][1] == grid.xF[xF_slice[1]] - @test ds2["yC"][1] == grid.yC[yC_slice[1]] - @test ds2["yF"][1] == grid.yF[yF_slice] - @test ds2["zC"][1] == grid.zC[zC_slice] - @test ds2["zF"][1] == grid.zF[zF_slice[1]] - - @test ds2["xC"][end] == grid.xC[xC_slice[end]] - @test ds2["xF"][end] == grid.xF[xF_slice[end]] - @test ds2["yC"][end] == grid.yC[yC_slice[end]] - @test ds2["yF"][end] == grid.yF[yF_slice] - @test ds2["zC"][end] == grid.zC[zC_slice] - @test ds2["zF"][end] == grid.zF[zF_slice[end]] + @test haskey(ds2.attrib, "iteration_interval") && ds2.attrib["iteration_interval"] == 10 + @test haskey(ds2.attrib, "output iteration interval") && !isnothing(ds2.attrib["output iteration interval"]) + + @test eltype(ds2["time"]) == eltype(model.clock.time) + + @test eltype(ds2["xC"]) == Float64 + @test eltype(ds2["xF"]) == Float64 + @test eltype(ds2["yC"]) == Float64 + @test eltype(ds2["yF"]) == Float64 + @test eltype(ds2["zC"]) == Float64 + @test 
eltype(ds2["zF"]) == Float64 + + @test length(ds2["xC"]) == length(i_slice) + @test length(ds2["xF"]) == length(i_slice) + @test length(ds2["yC"]) == length(j_slice) + @test length(ds2["yF"]) == length(j_slice) + @test length(ds2["zC"]) == length(k_slice) + @test length(ds2["zF"]) == length(k_slice) + + @test ds2["xC"][1] == grid.xC[i_slice[1]] + @test ds2["xF"][1] == grid.xF[i_slice[1]] + @test ds2["yC"][1] == grid.yC[j_slice[1]] + @test ds2["yF"][1] == grid.yF[j_slice[1]] + @test ds2["zC"][1] == grid.zC[k_slice[1]] + @test ds2["zF"][1] == grid.zF[k_slice[1]] + + @test ds2["xC"][end] == grid.xC[i_slice[end]] + @test ds2["xF"][end] == grid.xF[i_slice[end]] + @test ds2["yC"][end] == grid.yC[j_slice[end]] + @test ds2["yF"][end] == grid.yF[j_slice[end]] + @test ds2["zC"][end] == grid.zC[k_slice[end]] + @test ds2["zF"][end] == grid.zF[k_slice[end]] + + @test eltype(ds2["u"]) == Float32 + @test eltype(ds2["v"]) == Float32 + @test eltype(ds2["w"]) == Float32 + @test eltype(ds2["T"]) == Float32 + @test eltype(ds2["S"]) == Float32 u_sliced = ds2["u"][:, :, :, end] v_sliced = ds2["v"][:, :, :, end] @@ -123,18 +189,19 @@ function run_thermal_bubble_netcdf_tests(arch) close(ds2) - @test all(u_sliced .≈ Array(interiorparent(model.velocities.u))[xF_slice, yC_slice, zC_slice]) - @test all(v_sliced .≈ Array(interiorparent(model.velocities.v))[xC_slice, yF_slice, zC_slice]) - @test all(w_sliced .≈ Array(interiorparent(model.velocities.w))[xC_slice, yC_slice, zF_slice]) - @test all(T_sliced .≈ Array(interiorparent(model.tracers.T))[xC_slice, yC_slice, zC_slice]) - @test all(S_sliced .≈ Array(interiorparent(model.tracers.S))[xC_slice, yC_slice, zC_slice]) + @test all(u_sliced .≈ Array(interiorparent(model.velocities.u))[i_slice, j_slice, k_slice]) + @test all(v_sliced .≈ Array(interiorparent(model.velocities.v))[i_slice, j_slice, k_slice]) + @test all(w_sliced .≈ Array(interiorparent(model.velocities.w))[i_slice, j_slice, k_slice]) + @test all(T_sliced .≈ Array(interiorparent(model.tracers.T))[i_slice, j_slice, k_slice]) + @test all(S_sliced .≈ Array(interiorparent(model.tracers.S))[i_slice, j_slice, k_slice]) end function run_thermal_bubble_netcdf_tests_with_halos(arch) Nx, Ny, Nz = 16, 16, 16 Lx, Ly, Lz = 100, 100, 100 - grid = RegularCartesianGrid(size=(Nx, Ny, Nz), extent=(Lx, Ly, Lz)) + topo = (Periodic, Periodic, Bounded) + grid = RegularCartesianGrid(topology=topo, size=(Nx, Ny, Nz), extent=(Lx, Ly, Lz)) closure = IsotropicDiffusivity(ν=4e-2, κ=4e-2) model = IncompressibleModel(architecture=arch, grid=grid, closure=closure) simulation = Simulation(model, Δt=6, stop_iteration=10) @@ -153,17 +220,30 @@ function run_thermal_bubble_netcdf_tests_with_halos(arch) "T" => model.tracers.T, "S" => model.tracers.S ) - nc_filename = "test_dump_with_halos_$(typeof(arch)).nc" - nc_writer = NetCDFOutputWriter(model, outputs, filename=nc_filename, iteration_interval=10, with_halos=true) + + nc_filepath = "test_dump_with_halos_$(typeof(arch)).nc" + nc_writer = NetCDFOutputWriter(model, outputs, filepath=nc_filepath, iteration_interval=10, + field_slicer=FieldSlicer(with_halos=true)) push!(simulation.output_writers, nc_writer) run!(simulation) - ds = Dataset(nc_filename) + ds = Dataset(nc_filepath) @test haskey(ds.attrib, "date") && !isnothing(ds.attrib["date"]) @test haskey(ds.attrib, "Julia") && !isnothing(ds.attrib["Julia"]) @test haskey(ds.attrib, "Oceananigans") && !isnothing(ds.attrib["Oceananigans"]) + @test haskey(ds.attrib, "iteration_interval") && ds.attrib["iteration_interval"] == 10 + @test 
haskey(ds.attrib, "output iteration interval") && !isnothing(ds.attrib["output iteration interval"]) + + @test eltype(ds["time"]) == eltype(model.clock.time) + + @test eltype(ds["xC"]) == Float64 + @test eltype(ds["xF"]) == Float64 + @test eltype(ds["yC"]) == Float64 + @test eltype(ds["yF"]) == Float64 + @test eltype(ds["zC"]) == Float64 + @test eltype(ds["zF"]) == Float64 Hx, Hy, Hz = grid.Hx, grid.Hy, grid.Hz @test length(ds["xC"]) == Nx+2Hx @@ -187,6 +267,12 @@ function run_thermal_bubble_netcdf_tests_with_halos(arch) @test ds["zC"][end] == grid.zC[Nz+Hz] @test ds["zF"][end] == grid.zF[Nz+Hz+1] # z is Bounded + @test eltype(ds["u"]) == Float32 + @test eltype(ds["v"]) == Float32 + @test eltype(ds["w"]) == Float32 + @test eltype(ds["T"]) == Float32 + @test eltype(ds["S"]) == Float32 + u = ds["u"][:, :, :, end] v = ds["v"][:, :, :, end] w = ds["w"][:, :, :, end] @@ -208,7 +294,8 @@ function run_netcdf_function_output_tests(arch) Δt = 1.25 iters = 3 - model = IncompressibleModel(grid=RegularCartesianGrid(size=(N, N, N), extent=(L, 2L, 3L))) + grid = RegularCartesianGrid(size=(N, N, N), extent=(L, 2L, 3L)) + model = IncompressibleModel(architecture=arch, grid=grid) simulation = Simulation(model, Δt=Δt, stop_iteration=iters) grid = model.grid @@ -231,19 +318,33 @@ function run_netcdf_function_output_tests(arch) global_attributes = Dict("location" => "Bay of Fundy", "onions" => 7) - nc_filename = "test_function_outputs_$(typeof(arch)).nc" + nc_filepath = "test_function_outputs_$(typeof(arch)).nc" simulation.output_writers[:food] = - NetCDFOutputWriter(model, outputs; - iteration_interval=1, filename=nc_filename, dimensions=dims, verbose=true, + NetCDFOutputWriter(model, outputs; filepath=nc_filepath, + time_interval=Δt, dimensions=dims, array_type=Array{Float64}, verbose=true, global_attributes=global_attributes, output_attributes=output_attributes) + # We should have no problem with time_interval=Δt=1.25 as 1.25 can be represented exactly with + # a floating-point number. 
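To expand on the comment above about Δt = 1.25: it is a finite sum of powers of two (1 + 1/4), so Float64 stores it exactly and the accumulated clock time lands exactly on each output time. A throwaway REPL check, not part of the test itself:

```julia
1.25 == 0.5 + 0.75          # true: 1.25 is exactly representable in binary floating point
0.1 + 0.2 == 0.3            # false: 0.1 and 0.2 are not, so they would be a poor choice here
sum(fill(1.25, 4)) == 5.0   # true: repeatedly accumulating Δt = 1.25 does not drift
```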
+ run!(simulation) - ds = Dataset(nc_filename, "r") + ds = Dataset(nc_filepath, "r") @test haskey(ds.attrib, "date") && !isnothing(ds.attrib["date"]) @test haskey(ds.attrib, "Julia") && !isnothing(ds.attrib["Julia"]) @test haskey(ds.attrib, "Oceananigans") && !isnothing(ds.attrib["Oceananigans"]) + @test haskey(ds.attrib, "time_interval") && !isnothing(ds.attrib["time_interval"]) + @test haskey(ds.attrib, "output time interval") && !isnothing(ds.attrib["output time interval"]) + + @test eltype(ds["time"]) == eltype(model.clock.time) + + @test eltype(ds["xC"]) == Float64 + @test eltype(ds["xF"]) == Float64 + @test eltype(ds["yC"]) == Float64 + @test eltype(ds["yF"]) == Float64 + @test eltype(ds["zC"]) == Float64 + @test eltype(ds["zF"]) == Float64 @test length(ds["xC"]) == N @test length(ds["yC"]) == N @@ -269,6 +370,10 @@ function run_netcdf_function_output_tests(arch) @test ds.attrib["location"] == "Bay of Fundy" @test ds.attrib["onions"] == 7 + @test eltype(ds["scalar"]) == Float64 + @test eltype(ds["profile"]) == Float64 + @test eltype(ds["slice"]) == Float64 + @test length(ds["time"]) == iters+1 @test ds["time"][:] == [n*Δt for n in 0:iters] @@ -307,13 +412,13 @@ function run_netcdf_function_output_tests(arch) simulation = Simulation(model, Δt=Δt, stop_iteration=iters) simulation.output_writers[:food] = - NetCDFOutputWriter(model, outputs; - iteration_interval=1, filename=nc_filename, mode="a", dimensions=dims, verbose=true, + NetCDFOutputWriter(model, outputs; filepath=nc_filepath, mode="a", + iteration_interval=1, array_type=Array{Float64}, dimensions=dims, verbose=true, global_attributes=global_attributes, output_attributes=output_attributes) run!(simulation) - ds = Dataset(nc_filename, "r") + ds = Dataset(nc_filepath, "r") @test length(ds["time"]) == iters+1 @test length(ds["scalar"]) == iters+1 @@ -334,8 +439,87 @@ function run_netcdf_function_output_tests(arch) return nothing end +##### +##### JLD2OutputWriter tests +##### + +function jld2_field_output(model) + + model.clock.iteration = 0 + model.clock.time = 0.0 + + set!(model, u = (x, y, z) -> rand(), + v = (x, y, z) -> rand(), + w = (x, y, z) -> rand(), + T = 0, + S = 0) + + simulation = Simulation(model, Δt=1.0, stop_iteration=1) + + simulation.output_writers[:velocities] = JLD2OutputWriter(model, model.velocities, + time_interval = 1.0, + dir = ".", + prefix = "test", + force = true) + + u₀ = data(model.velocities.u)[3, 3, 3] + v₀ = data(model.velocities.v)[3, 3, 3] + w₀ = data(model.velocities.w)[3, 3, 3] + + run!(simulation) + + file = jldopen("test.jld2") + + # Data is saved without halos by default + u₁ = file["timeseries/u/0"][3, 3, 3] + v₁ = file["timeseries/v/0"][3, 3, 3] + w₁ = file["timeseries/w/0"][3, 3, 3] + + close(file) + + rm("test.jld2") + + FT = typeof(u₁) + + return FT(u₀) == u₁ && FT(v₀) == v₁ && FT(w₀) == w₁ +end + +function jld2_sliced_field_output(model) + + model.clock.iteration = 0 + model.clock.time = 0.0 + + set!(model, u = (x, y, z) -> rand(), + v = (x, y, z) -> rand(), + w = (x, y, z) -> rand()) + + simulation = Simulation(model, Δt=1.0, stop_iteration=1) + + simulation.output_writers[:velocities] = + JLD2OutputWriter(model, model.velocities, + time_interval = 1.0, + field_slicer = FieldSlicer(i=1:2, j=1:3, k=:), + dir = ".", + prefix = "test", + force = true) + + run!(simulation) + + file = jldopen("test.jld2") + + u₁ = file["timeseries/u/0"] + v₁ = file["timeseries/v/0"] + w₁ = file["timeseries/w/0"] + + close(file) + + rm("test.jld2") + + return size(u₁) == (2, 3, 4) && size(v₁) == (2, 3, 4) && 
size(w₁) == (2, 3, 5) +end + function run_jld2_file_splitting_tests(arch) - model = IncompressibleModel(grid=RegularCartesianGrid(size=(16, 16, 16), extent=(1, 1, 1))) + model = IncompressibleModel(architecture=arch, grid=RegularCartesianGrid(size=(16, 16, 16), extent=(1, 1, 1))) simulation = Simulation(model, Δt=1, stop_iteration=10) function fake_bc_init(file, model) @@ -358,7 +542,7 @@ function run_jld2_file_splitting_tests(arch) @test filesize("test_part3.jld2") < 200KiB for n in string.(1:3) - filename = "test_part" * n * ".jld2" + filename = "test_part$n.jld2" jldopen(filename, "r") do file # Test to make sure all files contain structs from `including`. @test file["grid/Nx"] == 16 @@ -372,6 +556,10 @@ function run_jld2_file_splitting_tests(arch) end end +##### +##### Checkpointer tests +##### + """ Run two coarse rising thermal bubble simulations and make sure that when restarting from a checkpoint, the restarted simulation matches the non-restarted @@ -547,35 +735,9 @@ function run_cross_architecture_checkpointer_tests(arch1, arch2) return nothing end -function instantiate_windowed_time_average(model) - - set!(model, u = (x, y, z) -> rand()) - - u, v, w = model.velocities - - u₀ = similar(interior(u)) - u₀ .= interior(u) - - wta = WindowedTimeAverage(model.velocities.u, time_window=1.0, time_interval=10.0) - - return all(wta(model) .== u₀) -end - -function time_step_with_windowed_time_average(model) - model.clock.iteration = 0 - model.clock.time = 0.0 - - set!(model, u=0, v=0, w=0, T=0, S=0) - - wta = WindowedTimeAverage(model.velocities.u, time_window=2.0, time_interval=4.0) - - simulation = Simulation(model, Δt=1.0, stop_time=4.0) - simulation.diagnostics[:u_avg] = wta - run!(simulation) - - return all(wta(model) .== interior(model.velocities.u)) -end - +##### +##### Dependency adding tests +##### function dependencies_added_correctly!(model, windowed_time_average, output_writer) @@ -589,83 +751,115 @@ function dependencies_added_correctly!(model, windowed_time_average, output_writ return windowed_time_average ∈ values(simulation.diagnostics) end -function jld2_field_output(model) +function run_dependency_adding_tests(model) - model.clock.iteration = 0 - model.clock.time = 0.0 + windowed_time_average = WindowedTimeAverage(model.velocities.u, time_window=2.0, time_interval=4.0) - set!(model, u = (x, y, z) -> rand(), - v = (x, y, z) -> rand(), - w = (x, y, z) -> rand(), - T = 0, - S = 0) + output = Dict("time_average" => windowed_time_average) + attributes = Dict("time_average" => Dict("longname" => "A time average", "units" => "arbitrary")) + dimensions = Dict("time_average" => ("xF", "yC", "zC")) - simulation = Simulation(model, Δt=1.0, stop_iteration=1) - - simulation.output_writers[:velocities] = JLD2OutputWriter(model, model.velocities, - time_interval = 1.0, - dir = ".", - prefix = "test", - force = true) - - u₀ = data(model.velocities.u)[3, 3, 3] - v₀ = data(model.velocities.v)[3, 3, 3] - w₀ = data(model.velocities.w)[3, 3, 3] - - run!(simulation) - - file = jldopen("test.jld2") - - # Data is saved without halos by default - u₁ = file["timeseries/u/0"][3, 3, 3] - v₁ = file["timeseries/v/0"][3, 3, 3] - w₁ = file["timeseries/w/0"][3, 3, 3] + # JLD2 dependencies test + jld2_output_writer = JLD2OutputWriter(model, output, time_interval=4.0, dir=".", prefix="test", force=true) - close(file) + @test dependencies_added_correctly!(model, windowed_time_average, jld2_output_writer) - rm("test.jld2") + # NetCDF dependency test + netcdf_output_writer = NetCDFOutputWriter(model, output, + 
time_interval = 4.0, + filepath = "test.nc", + output_attributes = attributes, + dimensions = dimensions) - FT = typeof(u₁) + @test dependencies_added_correctly!(model, windowed_time_average, netcdf_output_writer) - return FT(u₀) == u₁ && FT(v₀) == v₁ && FT(w₀) == w₁ + return nothing end -function jld2_sliced_field_output(model) - - model.clock.iteration = 0 - model.clock.time = 0.0 - - set!(model, u = (x, y, z) -> rand(), - v = (x, y, z) -> rand(), - w = (x, y, z) -> rand()) - - simulation = Simulation(model, Δt=1.0, stop_iteration=1) - - simulation.output_writers[:velocities] = - JLD2OutputWriter(model, model.velocities, - time_interval = 1.0, - field_slicer = FieldSlicer(i=1:2, j=1:3, k=:), - dir = ".", - prefix = "test", - force = true) +##### +##### Time averaging of output tests +##### + +function run_windowed_time_averaging_simulation_tests!(model) + model.clock.iteration = model.clock.time = 0 + simulation = Simulation(model, Δt=1.0, stop_iteration=0) + + jld2_output_writer = JLD2OutputWriter(model, model.velocities, + time_interval = π, + time_averaging_window = 1.0, + prefix = "test", + force = true) + + nc_filepath = "windowed_time_average_test1.nc" + nc_outputs = Dict(string(name) => field for (name, field) in pairs(model.velocities)) + nc_output_writer = NetCDFOutputWriter(model, nc_outputs, filepath=nc_filepath, + # https://github.com/Alexander-Barth/NCDatasets.jl/issues/105 + time_interval = Float64(π), time_averaging_window = 1.0) + + jld2_outputs_are_time_averaged = Tuple(typeof(out) <: WindowedTimeAverage for out in jld2_output_writer.outputs) + nc_outputs_are_time_averaged = Tuple(typeof(out) <: WindowedTimeAverage for out in values(nc_output_writer.outputs)) + + @test all(jld2_outputs_are_time_averaged) + @test all(nc_outputs_are_time_averaged) + + # Test that the collection does *not* start when a simulation is initialized + # when time_interval ≠ time_averaging_window + simulation.output_writers[:jld2] = jld2_output_writer + simulation.output_writers[:nc] = nc_output_writer run!(simulation) + + jld2_u_windowed_time_average = simulation.output_writers[:jld2].outputs.u + nc_w_windowed_time_average = simulation.output_writers[:nc].outputs["w"] + + @test !(jld2_u_windowed_time_average.collecting) + @test !(nc_w_windowed_time_average.collecting) + + # Test that time-averaging is finalized prior to output even when averaging over + # time_window is not fully realized. For this, step forward to a time at which + # collection should start. Note that time_interval = π and time_window = 1.0. + simulation.Δt = 1.5 + simulation.stop_iteration = 2 + run!(simulation) # model.clock.time = 3.0, just before output but after average-collection. + + @test jld2_u_windowed_time_average.collecting + @test nc_w_windowed_time_average.collecting + + # Step forward such that time_window is not reached, but output will occur. 
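The arithmetic behind the comments above, written out as a small standalone sketch; it only restates the schedule the assertions probe, using the same time_interval = π and time_averaging_window = 1.0 as the writers above:

```julia
time_interval = π      # output is scheduled for t = π, 2π, ...
time_window   = 1.0    # averaging is expected to begin one time unit before each output

collection_start  = time_interval - time_window   # ≈ 2.14
t_after_two_steps = 2 * 1.5                       # = 3.0 with Δt = 1.5

# At t = 3.0, collection has started but the first output has not yet occurred:
collection_start < t_after_two_steps < time_interval   # true
```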
+ simulation.Δt = π - 3 + 0.01 # ≈ 0.15 < 1.0 + simulation.stop_iteration = 3 + run!(simulation) # model.clock.time ≈ 3.15, after output + + @test jld2_u_windowed_time_average.previous_interval_stop_time == + model.clock.time - rem(model.clock.time, jld2_u_windowed_time_average.time_interval) + + @test nc_w_windowed_time_average.previous_interval_stop_time == + model.clock.time - rem(model.clock.time, nc_w_windowed_time_average.time_interval) + + # Test that collection does start when a simulation is initialized and + # time_interval == time_averaging_window + model.clock.iteration = model.clock.time = 0 + + simulation.output_writers[:jld2] = JLD2OutputWriter(model, model.velocities, + time_interval = π, + time_averaging_window = π, + prefix = "test", + force = true) + + nc_filepath = "windowed_time_average_test2.nc" + nc_outputs = Dict(string(name) => field for (name, field) in pairs(model.velocities)) + simulation.output_writers[:nc] = NetCDFOutputWriter(model, nc_outputs, filepath=nc_filepath, + # https://github.com/Alexander-Barth/NCDatasets.jl/issues/105 + time_interval = Float64(π), time_averaging_window = Float64(π)) - file = jldopen("test.jld2") - - u₁ = file["timeseries/u/0"] - v₁ = file["timeseries/v/0"] - w₁ = file["timeseries/w/0"] - - close(file) + run!(simulation) - rm("test.jld2") + @test simulation.output_writers[:jld2].outputs.u.collecting + @test simulation.output_writers[:nc].outputs["w"].collecting - return size(u₁) == (2, 3, 4) && size(v₁) == (2, 3, 4) && size(w₁) == (2, 3, 5) + return nothing end - - function jld2_time_averaging_of_horizontal_averages(model) model.clock.iteration = 0 @@ -710,131 +904,165 @@ function jld2_time_averaging_of_horizontal_averages(model) return wu == zero(FT) && wT == zero(FT) && uv == FT(2) end -@testset "Output writers" begin - @info "Testing output writers..." - - for arch in archs - @testset "NetCDF [$(typeof(arch))]" begin - @info " Testing NetCDF output writer [$(typeof(arch))]..." - run_thermal_bubble_netcdf_tests(arch) - run_thermal_bubble_netcdf_tests_with_halos(arch) - run_netcdf_function_output_tests(arch) - end - - @testset "JLD2 [$(typeof(arch))]" begin - @info " Testing JLD2 output writer [$(typeof(arch))]..." - run_jld2_file_splitting_tests(arch) - end - - @testset "Checkpointer [$(typeof(arch))]" begin - @info " Testing Checkpointer [$(typeof(arch))]..." - run_thermal_bubble_checkpointer_tests(arch) - run_checkpoint_with_function_bcs_tests(arch) - @hascuda run_cross_architecture_checkpointer_tests(CPU(), GPU()) - @hascuda run_cross_architecture_checkpointer_tests(GPU(), CPU()) - end +function run_netcdf_time_averaging_tests(arch) + topo = (Periodic, Periodic, Periodic) + domain = (x=(0, 1), y=(0, 1), z=(0, 1)) + grid = RegularCartesianGrid(topology=topo, size=(4, 4, 4); domain...) 
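For context on the analytic comparison that follows: with all velocities zero and the linear damping forcing Fc = -λ(x, y, z) c defined just below, the tracer equation reduces at every grid point to dc/dt = -λ c, so starting from c = 1 the exact solution is c = exp(-λ t). A standalone sketch of that solution, using the same λ as the test with purely illustrative sample values:

```julia
λ(x, y, z) = x + (1 - y)^2 + tanh(z)        # damping rate used by the forcing below
c_exact(x, y, z, t) = exp(-λ(x, y, z) * t)  # pointwise solution of dc/dt = -λ c with c(0) = 1

c_exact(0.5, 0.5, 0.5, 0.0)   # 1.0    -- matches the initial condition set!(model, c=1)
c_exact(0.5, 0.5, 0.5, 1.0)   # ≈ 0.30 -- a single grid point after one time unit
```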
+ + λ(x, y, z) = x + (1 - y)^2 + tanh(z) + Fc(x, y, z, t, c) = - λ(x, y, z) * c + + c_forcing = Forcing(Fc, field_dependencies=(:c,)) + + model = IncompressibleModel( + grid = grid, + architecture = arch, + timestepper = :RungeKutta3, + tracers = :c, + forcing = (c=c_forcing,), + coriolis = nothing, + buoyancy = nothing, + closure = nothing + ) - grid = RegularCartesianGrid(size=(4, 4, 4), extent=(1, 1, 1)) - model = IncompressibleModel(architecture=arch, grid=grid) + set!(model, c=1) + + Δt = 1/512 # Nice floating-point number + simulation = Simulation(model, Δt=Δt, stop_time=50Δt) + + ∫c_dxdy = AveragedField(model.tracers.c, dims=(1, 2)) + + nc_outputs = Dict("c" => ∫c_dxdy) + nc_dimensions = Dict("c" => ("zC",)) + + horizontal_average_nc_filepath = "decay_averaged_field_test.nc" + simulation.output_writers[:horizontal_average] = + NetCDFOutputWriter(model, nc_outputs, filepath=horizontal_average_nc_filepath, time_interval=10Δt, + dimensions=nc_dimensions, array_type=Array{Float64}, verbose=true) + + time_average_nc_filepath = "decay_windowed_time_average_test.nc" + window = 6Δt + stride = 2 + simulation.output_writers[:time_average] = + NetCDFOutputWriter(model, nc_outputs, filepath=time_average_nc_filepath, array_type=Array{Float64}, + time_interval=10Δt, time_averaging_window=window, time_averaging_stride=stride, + dimensions=nc_dimensions, verbose=true) - @testset "WindowedTimeAverage and FieldSlicer [$(typeof(arch))]" begin - @info " Testing WindowedTimeAverage and FieldSlicer [$(typeof(arch))]" + run!(simulation) - ##### - ##### Field slicing and field output - ##### - - @test FieldSlicer() isa FieldSlicer - @test instantiate_windowed_time_average(model) - @test jld2_field_output(model) + close(simulation.output_writers[:horizontal_average].dataset) + close(simulation.output_writers[:time_average].dataset) - - ##### - ##### Dependency-adding - ##### + ##### Horizontal average should evaluate to + ##### + ##### c̄(z, t) = ∫₀¹ ∫₀¹ exp{- λ(x, y, z) * t} dx dy + ##### = 1 / (Nx*Ny) * Σᵢ₌₁ᴺˣ Σⱼ₌₁ᴺʸ exp{- λ(i, j, k) * t} + ##### + ##### which we can compute analytically. - windowed_time_average = WindowedTimeAverage(model.velocities.u, time_window=2.0, time_interval=4.0) + ds = NCDataset(horizontal_average_nc_filepath) - output = Dict("time_average" => windowed_time_average) - attributes = Dict("time_average" => Dict("longname" => "A time average", "units" => "arbitrary")) - dimensions = Dict("time_average" => ("xF", "yC", "zC")) + Nx, Ny, Nz = size(grid) + xs, ys, zs = nodes(model.tracers.c) - # JLD2 dependencies test - jld2_output_writer = JLD2OutputWriter(model, output, time_interval=4.0, dir=".", prefix="test", force=true) + c̄(z, t) = 1 / (Nx * Ny) * sum(exp(-λ(x, y, z) * t) for x in xs for y in ys) - @test dependencies_added_correctly!(model, windowed_time_average, jld2_output_writer) + for (n, t) in enumerate(ds["time"]) + @test ds["c"][:, n] ≈ c̄.(zs, t) + end - # NetCDF dependency test - netcdf_output_writer = NetCDFOutputWriter(model, output, - time_interval = 4.0, - filename = "test.nc", - output_attributes = attributes, - dimensions = dimensions) + close(ds) - @test dependencies_added_correctly!(model, windowed_time_average, netcdf_output_writer) + ##### + ##### Test strided windowed time average against analytic solution + ##### - ##### - ##### Time-stepping and Simulations.run! 
with WindowedTimeAverage - ##### - - @test time_step_with_windowed_time_average(model) + ds = NCDataset(time_average_nc_filepath) - model.clock.iteration = model.clock.time = 0 - simulation = Simulation(model, Δt=1.0, stop_iteration=0) + attribute_names = ( + "time_interval", "output time interval", + "time_averaging_window", "time averaging window", + "time_averaging_stride", "time averaging stride") - jld2_output_writer = JLD2OutputWriter(model, model.velocities, - time_interval = π, - time_averaging_window = 1.0, - prefix = "test", - force = true) + for name in attribute_names + @test haskey(ds.attrib, name) && !isnothing(ds.attrib[name]) + end + + c̄(ts) = 1/length(ts) * sum(c̄.(zs, t) for t in ts) - outputs_are_time_averaged = Tuple(typeof(out) <: WindowedTimeAverage for out in jld2_output_writer.outputs) + window_size = Int(window/Δt) + for (n, t) in enumerate(ds["time"][2:end]) + averaging_times = [t - n*Δt for n in 0:stride:window_size-1] + @test ds["c"][:, n+1] ≈ c̄(averaging_times) + end - @test all(outputs_are_time_averaged) - - # Test the collection does *not* start when a simulation is initialized - # when time_interval ≠ time_averaging_window - simulation.output_writers[:jld2] = jld2_output_writer + close(ds) - run!(simulation) + return nothing +end - windowed_time_average = simulation.output_writers[:jld2].outputs.u +##### +##### Run output writer tests! +##### - @test !(windowed_time_average.collecting) +@testset "Output writers" begin + @info "Testing output writers..." - # Test that time-averaging is finalized prior to output even when averaging over - # time_window is not fully realized. For this, step forward to a time at which - # collection should start. Note that time_interval = π and time_window = 1.0. - simulation.Δt = 1.5 - simulation.stop_iteration = 2 - run!(simulation) # model.clock.time = 3.0, just before output but after average-collection. + @testset "FieldSlicer" begin + @info " Testing FieldSlicer..." + @test FieldSlicer() isa FieldSlicer + end - @test windowed_time_average.collecting + for arch in archs + # Some tests can reuse this same grid and model. + grid = RegularCartesianGrid(size=(4, 4, 4), extent=(1, 1, 1)) + model = IncompressibleModel(architecture=arch, grid=grid) - # Step forward such that time_window is not reached, but output will occur. - simulation.Δt = π - 3 + 0.01 # ≈ 0.15 < 1.0 - simulation.stop_iteration = 3 - run!(simulation) # model.clock.time ≈ 3.15, after output + @testset "WindowedTimeAverage" begin + @info " Testing WindowedTimeAverage..." + @test instantiate_windowed_time_average(model) + @test time_step_with_windowed_time_average(model) + end - @test windowed_time_average.previous_interval_stop_time == - model.clock.time - rem(model.clock.time, windowed_time_average.time_interval) + @testset "NetCDF [$(typeof(arch))]" begin + @info " Testing NetCDF output writer [$(typeof(arch))]..." + run_thermal_bubble_netcdf_tests(arch) + run_thermal_bubble_netcdf_tests_with_halos(arch) + run_netcdf_function_output_tests(arch) + end - # Test the collection does start when a simulation is initialized and - # time_interval = time_averaging_window - model.clock.iteration = model.clock.time = 0 - simulation.output_writers[:jld2] = JLD2OutputWriter(model, model.velocities, - time_interval = π, - time_averaging_window = π, - prefix = "test", - force = true) + @testset "JLD2 [$(typeof(arch))]" begin + @info " Testing JLD2 output writer [$(typeof(arch))]..." 
+ + @test jld2_field_output(model) + @test jld2_sliced_field_output(model) + + run_jld2_file_splitting_tests(arch) + end - run!(simulation) + @testset "Checkpointer [$(typeof(arch))]" begin + @info " Testing Checkpointer [$(typeof(arch))]..." + + run_thermal_bubble_checkpointer_tests(arch) + run_checkpoint_with_function_bcs_tests(arch) + + @hascuda run_cross_architecture_checkpointer_tests(CPU(), GPU()) + @hascuda run_cross_architecture_checkpointer_tests(GPU(), CPU()) + end - windowed_time_average = simulation.output_writers[:jld2].outputs.u + @testset "Dependency adding [$(typeof(arch))]" begin + @info " Testing dependency adding [$(typeof(arch))]..." + run_dependency_adding_tests(model) + end - @test windowed_time_average.collecting + @testset "Time averaging of output [$(typeof(arch))]" begin + @info " Testing time averaging of output [$(typeof(arch))]..." + run_windowed_time_averaging_simulation_tests!(model) + @test jld2_time_averaging_of_horizontal_averages(model) + + run_netcdf_time_averaging_tests(arch) end end end diff --git a/test/test_utils.jl b/test/test_utils.jl new file mode 100644 index 0000000000..359e5e7f4b --- /dev/null +++ b/test/test_utils.jl @@ -0,0 +1,30 @@ +@testset "Utils" begin + @info "Testing utils..." + + @testset "prettytime" begin + @test prettytime(0) == "0 seconds" + @test prettytime(35e-15) == "3.500e-14 seconds" + + @test prettytime(1e-9) == "1 ns" + @test prettytime(1e-6) == "1 μs" + @test prettytime(1e-3) == "1 ms" + + @test prettytime(second) == "1 second" + @test prettytime(minute) == "1 minute" + @test prettytime(hour) == "1 hour" + @test prettytime(day) == "1 day" + @test prettytime(year) == "1 year" + + @test prettytime(2second) == "2 seconds" + @test prettytime(4minute) == "4 minutes" + @test prettytime(8hour) == "8 hours" + @test prettytime(16day) == "16 days" + @test prettytime(32year) == "32 years" + + @test prettytime(13.7seconds) == "13.700 seconds" + @test prettytime(6.666minutes) == "6.666 minutes" + @test prettytime(1.234hour) == "1.234 hours" + @test prettytime(40.5days) == "40.500 days" + @test prettytime(5.0001years) == "5.000 years" + end +end diff --git a/verification/stratified_couette_flow/stratified_couette_flow.jl b/verification/stratified_couette_flow/stratified_couette_flow.jl index dcd7f61257..d1afffe147 100644 --- a/verification/stratified_couette_flow/stratified_couette_flow.jl +++ b/verification/stratified_couette_flow/stratified_couette_flow.jl @@ -1,6 +1,7 @@ using Statistics, Printf using Oceananigans +using Oceananigans.Fields using Oceananigans.TurbulenceClosures using Oceananigans.OutputWriters using Oceananigans.Diagnostics @@ -11,7 +12,8 @@ function uτ(model, Uavg, U_wall) Nz, Hz, Δz = model.grid.Nz, model.grid.Hz, model.grid.Δz ν = model.closure.ν - U = Uavg(model)[1+Hz:end-Hz] # Exclude average of halo region. + compute!(Uavg) + U = Array(Uavg.data[1, 1, 1:model.grid.Nz]) # Exclude average of halo region. # Use a finite difference to calculate dU/dz at the top and bottomtom walls. # The distance between the center of the cell adjacent to the wall and the @@ -29,7 +31,8 @@ function q_wall(model, Tavg, Θ_wall) Nz, Hz, Δz = model.grid.Nz, model.grid.Hz, model.grid.Δz κ = model.closure.κ.T - Θ = Tavg(model)[1+Hz:end-Hz] # Exclude average of halo region. + compute!(Tavg) + Θ = Array(Tavg.data[1, 1, 1:model.grid.Nz]) # Exclude average of halo region. # Use a finite difference to calculate dθ/dz at the top and bottomtom walls. 
# The distance between the center of the cell adjacent to the wall and the @@ -189,20 +192,20 @@ function simulate_stratified_couette_flow(; Nxy, Nz, arch=GPU(), h=1, U_wall=1, ##### Set up profile output writer ##### - Uavg = Average(model.velocities.u, dims=(1, 2), return_type=Array) - Vavg = Average(model.velocities.v, dims=(1, 2), return_type=Array) - Wavg = Average(model.velocities.w, dims=(1, 2), return_type=Array) - Tavg = Average(model.tracers.T, dims=(1, 2), return_type=Array) - νavg = Average(model.diffusivities.νₑ, dims=(1, 2), return_type=Array) - κavg = Average(model.diffusivities.κₑ.T, dims=(1, 2), return_type=Array) + Uavg = AveragedField(model.velocities.u, dims=(1, 2)) + Vavg = AveragedField(model.velocities.v, dims=(1, 2)) + Wavg = AveragedField(model.velocities.w, dims=(1, 2)) + Tavg = AveragedField(model.tracers.T, dims=(1, 2)) + νavg = AveragedField(model.diffusivities.νₑ, dims=(1, 2)) + κavg = AveragedField(model.diffusivities.κₑ.T, dims=(1, 2)) profiles = Dict( - :u => model -> Uavg(model), - :v => model -> Vavg(model), - :w => model -> Wavg(model), - :T => model -> Tavg(model), - :nu => model -> νavg(model), - :kappaT => model -> κavg(model)) + :u => Uavg, + :v => Vavg, + :w => Wavg, + :T => Tavg, + :nu => νavg, + :kappaT => κavg) profile_writer = JLD2OutputWriter(model, profiles, dir=base_dir, prefix=prefix * "_profiles", @@ -263,5 +266,5 @@ function simulate_stratified_couette_flow(; Nxy, Nz, arch=GPU(), h=1, U_wall=1, push!(simulation.output_writers, field_writer, profile_writer, statistics_writer) run!(simulation) - return nothing + return simulation end
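Returning the `Simulation` instead of `nothing` lets a driver script inspect the finished run or keep stepping it forward. A rough usage sketch with illustrative argument values, assuming the simulation exposes its model and `stop_iteration` fields as in the tests earlier in this diff; any required keywords not shown in this hunk would also need to be passed:

```julia
simulation = simulate_stratified_couette_flow(Nxy=32, Nz=16, arch=CPU())

# Inspect where the run finished ...
@show simulation.model.clock.time

# ... or extend it by raising the stop criterion and calling run! again.
simulation.stop_iteration += 1000
run!(simulation)
```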