diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 49a5568..1acf4dd 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -21,66 +21,104 @@ jobs: fail-fast: false matrix: version: - - '1.10' - python: - - '3.12' - os: - - ubuntu-latest + - '1.11' + # python: + # - 3.9 + # - 3.12 + os: + # - ubuntu-latest - macos-latest arch: - x64 steps: - # - name: Cancel ongoing test runs for previous commits - # uses: styfle/cancel-workflow-action@0.12.1 - # with: - # access_token: ${{ github.token }} - uses: actions/checkout@v4 - - uses: julia-actions/setup-julia@latest + # - name: Set up Python 🐍 ${{ matrix.python }} + # uses: actions/setup-python@v5 + # with: + # python-version: ${{ matrix.python }} + # - name: Create environment with micromamba 🐍🖤 + # uses: mamba-org/setup-micromamba@v1 + # with: + # micromamba-version: '2.0.2-2' + # environment-file: ./environment.yml + # environment-name: oggm_env # it is recommendable to add both name and yml file. + # init-shell: bash + # cache-environment: false + # cache-downloads: false + # # If necessary, we can include .condarc to configure environment + # # condarc-file: ./condarc.yml + # - name: Test creation of environment with micromamba 🔧🐍🖤 + # run: | + # which python + # micromamba env export + # # shell: bash -el {0} + # - name: Update certifi + # run: | + # pip install --upgrade certifi + # # shell: bash -el {0} + # - name: Set ENV Variables for PyCall.jl 🐍 📞 + # run: | + # echo "PYTHON=/Users/runner/micromamba/envs/oggm_env/bin/python" >> "$GITHUB_ENV" + # # shell: bash -el {0} + - uses: julia-actions/setup-julia@v1 with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - - name: Install dependencies on Ubuntu - if: matrix.os == 'ubuntu-latest' - run: | - sudo apt-get update - sudo apt-get install -y libxml2 libxml2-dev libspatialite7 libspatialite-dev - - name: Install dependencies on macOS - if: matrix.os == 'macos-latest' - run: | - brew install libxml2 libspatialite - - uses: julia-actions/cache@v2 - with: - cache-registries: "false" - cache-compiled: "false" - # - uses: actions/cache@v4 - # env: - # cache-name: cache-artifacts - # with: - # path: ~/.julia/artifacts - # key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} - # restore-keys: | - # ${{ runner.os }}-test-${{ env.cache-name }}- - # ${{ runner.os }}-test- - # ${{ runner.os }}- - - uses: julia-actions/julia-buildpkg@v1 - name: Check Julia SSL certifications 🔎🔐 run: | - julia -e 'using NetworkOptions; println(NetworkOptions.bundled_ca_roots()); println(NetworkOptions.ca_roots()); println(NetworkOptions.ca_roots_path()); println(NetworkOptions.ssh_key_path()); println(NetworkOptions.ssh_key_name()); println(NetworkOptions.ssh_pub_key_path())' + julia -e 'using NetworkOptions; println(NetworkOptions.bundled_ca_roots()); println(NetworkOptions.ca_roots_path()); println(NetworkOptions.ssh_key_path()); println(NetworkOptions.ssh_key_name()); println(NetworkOptions.ssh_pub_key_path())' # echo "SSL_CERT_PATH=$(julia -e 'using NetworkOptions; println(NetworkOptions.bundled_ca_roots())')" >> "$GITHUB_ENV" + # shell: bash -el {0} + # - name: Install dependencies on Ubuntu + # if: matrix.os == 'ubuntu-latest' + # run: | + # sudo apt-get update + # sudo apt-get install -y libxml2 libxml2-dev libspatialite7 libspatialite-dev + # echo "LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH" >> "$GITHUB_ENV" + # - name: Install dependencies on macOS + # if: matrix.os == 'macos-latest' + # run: | + # brew install libxml2 
libspatialite + # echo "PKG_CONFIG_PATH=/opt/homebrew/opt/libxml2/lib/pkgconfig" >> "$GITHUB_ENV" + # - name: Check that new paths had been exported + # if: matrix.os == 'macos-latest' + # run: | + # echo $PYTHON + # echo $PKG_CONFIG_PATH + # echo $SSL_CERT_FILE + - uses: julia-actions/cache@v1 + with: + cache-registries: "true" + cache-compiled: "true" + - name: Build Julia packages in Ubuntu + uses: julia-actions/julia-buildpkg@v1 + if: matrix.os == 'ubuntu-latest' + # env: + # PYTHON : /Users/runner/micromamba/envs/oggm_env/bin/python + # # The SSL certificate path can be readed from the action "Check Julia SSL certifications" + # # JULIA_SSL_CA_ROOTS_PATH: /etc/ssl/certs/ca-certificates.crt + # SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt + - name: Build Julia packages in MacOS + uses: julia-actions/julia-buildpkg@v1 + if: matrix.os == 'macos-latest' + # env: + # PYTHON : /Users/runner/micromamba/envs/oggm_env/bin/python + # JULIA_SSL_CA_ROOTS_PATH: /Users/runner/hostedtoolcache/julia/1.11.1/x64/share/julia/cert.pem + # SSL_CERT_FILE: /Users/runner/hostedtoolcache/julia/1.11.1/x64/share/julia/cert.pem - name: Run tests in Ubuntu uses: julia-actions/julia-runtest@v1 if: matrix.os == 'ubuntu-latest' - env: - # Specify the file or directory containing the certificate authority roots. See NetworkOptions.ca_roots - JULIA_SSL_CA_ROOTS_PATH: /opt/hostedtoolcache/julia/1.10.7/x64/share/julia/cert.pem - # The default value returned by ca_roots_path() may be overridden by setting the JULIA_SSL_CA_ROOTS_PATH, SSL_CERT_DIR, or SSL_CERT_FILE environment variables, - SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt + # env: + # PYTHON : /Users/runner/micromamba/envs/oggm_env/bin/python + # # JULIA_SSL_CA_ROOTS_PATH: /etc/ssl/certs/ca-certificates.crt + # SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt - name: Run tests in MacOS uses: julia-actions/julia-runtest@v1 if: matrix.os == 'macos-latest' - env: - JULIA_SSL_CA_ROOTS_PATH: /Users/runner/hostedtoolcache/julia/1.10.7/x64/share/julia/cert.pem - SSL_CERT_FILE: /Users/runner/hostedtoolcache/julia/1.10.7/x64/share/julia/cert.pem + # env: + # PYTHON : /Users/runner/micromamba/envs/oggm_env/bin/python + # JULIA_SSL_CA_ROOTS_PATH: /Users/runner/hostedtoolcache/julia/1.11.1/x64/share/julia/cert.pem + # SSL_CERT_FILE: /Users/runner/hostedtoolcache/julia/1.11.1/x64/share/julia/cert.pem - uses: julia-actions/julia-processcoverage@v1 - uses: codecov/codecov-action@v4 with: diff --git a/Project.toml b/Project.toml index 818f285..530bbc6 100644 --- a/Project.toml +++ b/Project.toml @@ -1,39 +1,41 @@ name = "Sleipnir" -uuid = "f5e6c550-199f-11ee-3608-394420200519" +uuid = "10baed72-45ec-4fdd-b59b-ebd9654d36be" authors = ["Jordi Bolibar ", "Facundo Sapienza "] -version = "0.7.0" +version = "0.7.1" [deps] +CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" +CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193" ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" -CondaPkg = "992eb4ea-22a4-4c89-a5bb-47a3300528ab" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6" HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" Infiltrator = "5903a43b-9cc3-4c30-8d17-598619ec4e9b" JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819" -Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb" -OpenSSL_jll = "458c3c95-2e84-50aa-8efc-19380b2a3a95" +JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab" Pkg = 
"44cfe95a-1eb2-52ea-b672-e2afdf69b78f" -PreferenceTools = "ba661fbb-e901-4445-b070-854aec6bfbc5" -PythonCall = "6099a3de-0909-46bc-b1f4-468b9a2dfc0d" +Rasters = "a3a2b9e3-a471-40c9-b274-f788e487c689" Revise = "295af30f-e4ad-537b-8983-00126c2a3abe" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" +Tar = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [compat] -CairoMakie = "0.11" -CondaPkg = "0.2.24" +CSV = "0.10.15" +CodecZlib = "0.7.6" Downloads = "1" HDF5 = "0.17" -Infiltrator = "1" JLD2 = "0.4" -PreferenceTools = "0.1.2" -PythonCall = "0.9.23" +JSON = "0.21.4" +NCDatasets = "0.14.6" +Rasters = "0.13.0" Revise = "3" Statistics = "1" -julia = "1.7" +Tar = "1.10.0" +julia = "1.9" [extras] Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/data/missing_glaciers.jld2 b/data/missing_glaciers.jld2 index ccb3b91..757eb34 100644 Binary files a/data/missing_glaciers.jld2 and b/data/missing_glaciers.jld2 differ diff --git a/src/Sleipnir.jl b/src/Sleipnir.jl index 6b9b84d..feb2a3f 100644 --- a/src/Sleipnir.jl +++ b/src/Sleipnir.jl @@ -15,9 +15,12 @@ using CairoMakie using Downloads using HDF5 using ComponentArrays - -include("setup/ssl.jl") -using PythonCall, CondaPkg +using Rasters +using CSV +using JSON +using CodecZlib +using Tar +import NCDatasets # ############################################## # ############ PARAMETERS ############### @@ -25,28 +28,7 @@ using PythonCall, CondaPkg cd(@__DIR__) const global root_dir::String = dirname(Base.current_project()) - -# ############################################## -# ############ PYTHON LIBRARIES ############## -# ############################################## - -# We define empty objects for the Python packages -const netCDF4 = Ref{Py}() -const cfg = Ref{Py}() -const utils = Ref{Py}() -const workflow = Ref{Py}() -const tasks = Ref{Py}() -const global_tasks = Ref{Py}() -const graphics = Ref{Py}() -const bedtopo = Ref{Py}() -const millan22 = Ref{Py}() -const MBsandbox = Ref{Py}() -const salem = Ref{Py}() - -# Essential Python libraries -const xr = Ref{Py}() -const rioxarray = Ref{Py}() -const pd = Ref{Py}() +const global prepro_dir::String = joinpath(homedir(), ".ODINN", "ODINN_prepro") # ############################################## # ########## SLEIPNIR LIBRARIES ############## diff --git a/src/glaciers/climate/Climate1D.jl b/src/glaciers/climate/Climate1D.jl index a04d7ba..8f4bb70 100644 --- a/src/glaciers/climate/Climate1D.jl +++ b/src/glaciers/climate/Climate1D.jl @@ -1,23 +1,33 @@ -@kwdef mutable struct Climate1Dstep{F <: AbstractFloat} +@kwdef mutable struct Climate1Dstep{F <: AbstractFloat} temp::Vector{F} PDD::Vector{F} snow::Vector{F} rain::Vector{F} - gradient::Ref{F} - avg_gradient::Ref{F} + gradient::F + avg_gradient::F x::Vector{F} y::Vector{F} - ref_hgt::Ref{F} + ref_hgt::F end -@kwdef mutable struct Climate1D{F <: AbstractFloat} - raw_climate::Py # Raw climate dataset for the whole simulation +Base.:(==)(a::Climate1Dstep, b::Climate1Dstep) = a.temp == b.temp && a.PDD == b.PDD && + a.snow == b.snow && a.rain == b.rain && + a.gradient == b.gradient && a.avg_gradient == b.avg_gradient && + a.x == b.x && a.y == b.y && a.ref_hgt == b.ref_hgt + +@kwdef mutable struct Climate1D{F <: AbstractFloat} + raw_climate::RasterStack # Raw climate dataset for the whole simulation # Buffers to avoid memory allocations - climate_raw_step::Ref{Py} # Raw climate trimmed for the current step - climate_step::Ref{Py} # Climate data for the current step + climate_raw_step::RasterStack # Raw climate trimmed for 
the current step + climate_step::Dict # Climate data for the current step climate_2D_step::Climate2Dstep # 2D climate data for the current step to feed to the MB model longterm_temps::Vector{F} # Longterm temperatures for the ice rheology - avg_temps::Ref{Py} # Intermediate buffer for computing average temperatures - avg_gradients::Ref{Py} # Intermediate buffer for computing average gradients + avg_temps::F # Intermediate buffer for computing average temperatures + avg_gradients::F # Intermediate buffer for computing average gradients end + +Base.:(==)(a::Climate1D, b::Climate1D) = a.raw_climate == b.raw_climate && a.climate_raw_step == b.climate_raw_step && + a.climate_step == b.climate_step && a.climate_2D_step == b.climate_2D_step && + a.longterm_temps == b.longterm_temps && a.avg_temps == b.avg_temps && + a.avg_gradients == b.avg_gradients diff --git a/src/glaciers/climate/Climate2D.jl b/src/glaciers/climate/Climate2D.jl index 6f55ab7..fb58cf1 100644 --- a/src/glaciers/climate/Climate2D.jl +++ b/src/glaciers/climate/Climate2D.jl @@ -1,25 +1,35 @@ export Climate2Dstep, Climate2D -@kwdef mutable struct Climate2Dstep{F <: AbstractFloat} +@kwdef mutable struct Climate2Dstep{F <: AbstractFloat} temp::Matrix{F} PDD::Matrix{F} snow::Matrix{F} rain::Matrix{F} - gradient::Ref{F} - avg_gradient::Ref{F} + gradient::F + avg_gradient::F x::Vector{F} y::Vector{F} - ref_hgt::Ref{F} + ref_hgt::F end -@kwdef mutable struct Climate2D{F <: AbstractFloat} - raw_climate::Py # Raw climate dataset for the whole simulation +Base.:(==)(a::Climate2Dstep, b::Climate2Dstep) = a.temp == b.temp && a.PDD == b.PDD && + a.snow == b.snow && a.rain == b.rain && + a.gradient == b.gradient && a.avg_gradient == b.avg_gradient && + a.x == b.x && a.y == b.y && a.ref_hgt == b.ref_hgt + +@kwdef mutable struct Climate2D{F <: AbstractFloat} + raw_climate::RasterStack # Raw climate dataset for the whole simulation # Buffers to avoid memory allocations - climate_raw_step::Ref{Py} # Raw climate trimmed for the current step - climate_step::Ref{Py} # Climate data for the current step + climate_raw_step::RasterStack # Raw climate trimmed for the current step + climate_step::Dict # Climate data for the current step climate_2D_step::Climate2Dstep # 2D climate data for the current step to feed to the MB model longterm_temps::Vector{F} # Longterm temperatures for the ice rheology - avg_temps::Ref{Py} # Intermediate buffer for computing average temperatures - avg_gradients::Ref{Py} # Intermediate buffer for computing average gradients + avg_temps::F # Intermediate buffer for computing average temperatures + avg_gradients::F # Intermediate buffer for computing average gradients end + +Base.:(==)(a::Climate2D, b::Climate2D) = a.raw_climate == b.raw_climate && a.climate_raw_step == b.climate_raw_step && + a.climate_step == b.climate_step && a.climate_2D_step == b.climate_2D_step && + a.longterm_temps == b.longterm_temps && a.avg_temps == b.avg_temps && + a.avg_gradients == b.avg_gradients diff --git a/src/glaciers/climate/climate2D_utils.jl b/src/glaciers/climate/climate2D_utils.jl index d82fb12..30ede76 100644 --- a/src/glaciers/climate/climate2D_utils.jl +++ b/src/glaciers/climate/climate2D_utils.jl @@ -7,47 +7,47 @@ export initialize_glacier_climate!, downscale_2D_climate!, downscale_2D_climate, get_cumulative_climate!, get_cumulative_climate, apply_t_cumul_grad!, apply_t_grad!, trim_period, partial_year, get_longterm_temps -using Dates # to provide correct Julian time slices +using Dates # to provide correct Julian time slices """ - 
function initialize_glacier_climate!(glacier::Glacier, params::Parameters) + function initialize_glacier_climate!(glacier::AbstractGlacier, params::Parameters) Initializes the `Climate` data structure for a given `Glacier`` """ function initialize_glacier_climate!(glacier::AbstractGlacier, params::Parameters) dummy_period = partial_year(Day, params.simulation.tspan[1]):Day(1):partial_year(Day, params.simulation.tspan[1] + params.simulation.step) - raw_climate = xr[].open_dataset(joinpath(pyconvert(String,glacier.gdir.dir), "raw_climate_$(params.simulation.tspan).nc")) - climate_step = Ref{Py}(get_cumulative_climate(raw_climate.sel(time=dummy_period))) - climate_2D_step = downscale_2D_climate(climate_step[], glacier) - longterm_temps = get_longterm_temps(glacier.gdir, raw_climate) + raw_climate = RasterStack(joinpath(prepro_dir, params.simulation.rgi_paths[glacier.rgi_id], "raw_climate_$(params.simulation.tspan).nc")) + climate_step = get_cumulative_climate(raw_climate[At(dummy_period)]) + climate_2D_step = downscale_2D_climate(climate_step, glacier) + longterm_temps = get_longterm_temps(glacier.rgi_id, params, raw_climate) glacier.climate = Climate2D(raw_climate = raw_climate, - climate_raw_step = Ref{Py}(raw_climate.sel(time=dummy_period)), + climate_raw_step = raw_climate[At(dummy_period)], #climate_cum_step = raw_climate.sel(time=dummy_period).sum(), climate_step = climate_step, climate_2D_step = climate_2D_step, - longterm_temps = pyconvert(Vector,longterm_temps), - avg_temps = Ref{Py}(raw_climate.sel(time=dummy_period).temp.mean()), - avg_gradients = Ref{Py}(raw_climate.sel(time=dummy_period).gradient.mean())) + longterm_temps = longterm_temps, + avg_temps = mean(raw_climate[At(dummy_period)].temp), + avg_gradients = mean(raw_climate[At(dummy_period)].gradient)) end -function generate_raw_climate_files(gdir::Py, tspan::Tuple{F, F}) where {F <: AbstractFloat} - if !ispath(joinpath(pyconvert(String,gdir.dir), "raw_climate_$tspan.nc")) - println("Getting raw climate data for: ", gdir.rgi_id) +function generate_raw_climate_files(rgi_id::String, simparams::SimulationParameters) + rgi_path = joinpath(prepro_dir, simparams.rgi_paths[rgi_id]) + if !ispath(joinpath(rgi_path, "raw_climate_$(simparams.tspan).nc")) + println("Getting raw climate data for: ", rgi_id) # Get raw climate data for gdir - tspan_date = partial_year(Day, tspan[1]):Day(1):partial_year(Day, tspan[2]) - climate = get_raw_climate_data(gdir) + tspan_date = partial_year(Day, simparams.tspan[1]):Day(1):partial_year(Day, simparams.tspan[2]) + climate = get_raw_climate_data(rgi_path) # Make sure the desired period is covered by the climate data - period = trim_period(tspan_date, climate) - if any((jldate(climate.time, 0) <= period[1]) & any(jldate(climate.time, -1) >= period[end])) - climate = climate.sel(time=period) # Crop desired time period + period = trim_period(tspan_date, climate) + if any((dims(climate, Ti)[begin] <= period[begin]) & any(dims(climate, Ti)[end] >= period[end])) + climate = climate[At(period)] # Crop desired time period else - @warn "No overlapping period available between climate tspan!" + @warn "No overlapping period available between climate tspan!" 
end - # Save raw gdir climate on disk - climate.to_netcdf(joinpath(pyconvert(String,gdir.dir), "raw_climate_$tspan.nc")) - climate.close() + # Save raw gdir climate on disk + write(joinpath(rgi_path, "raw_climate_$(simparams.tspan).nc"), climate) GC.gc() end end @@ -59,44 +59,46 @@ end Computes Positive Degree Days (PDDs) and cumulative rainfall and snowfall from climate data. """ function get_cumulative_climate!(climate, period, gradient_bounds=[-0.009, -0.003], default_grad=-0.0065) - climate.climate_raw_step[] = climate.raw_climate.sel(time=period) - climate.avg_temps[] = climate.climate_raw_step[].temp.mean() - - climate.avg_gradients[] = climate.climate_raw_step[].gradient.mean() - climate.climate_raw_step[].temp.data = climate.climate_raw_step[].temp.where(climate.climate_raw_step[].temp > 0.0, 0.0).data # get PDDs - climate.climate_raw_step[].gradient.data = utils[].clip_array(climate.climate_raw_step[].gradient.data, gradient_bounds[1], gradient_bounds[2]) # Clip gradients within plausible values - climate.climate_step[] = climate.climate_raw_step[].sum() # get monthly cumulative values - climate.climate_step[] = climate.climate_step[].assign(Dict("avg_temp"=>climate.avg_temps[])) - climate.climate_step[] = climate.climate_step[].assign(Dict("avg_gradient"=>climate.avg_gradients[])) - climate.climate_step[].attrs = climate.climate_raw_step[].attrs + climate.climate_raw_step = climate.raw_climate[At(period)] + climate.avg_temps = mean(climate.climate_raw_step.temp) + + climate.avg_gradients = mean(climate.climate_raw_step.gradient) + climate.climate_raw_step.temp.data .= max.(climate.climate_raw_step.temp, 0.0) # get PDDs + climate.climate_raw_step.gradient.data .= clamp.(climate.climate_raw_step.gradient.data, gradient_bounds[1], gradient_bounds[2]) # Clip gradients within plausible values + climate.climate_step["prcp"] = sum(climate.climate_raw_step.prcp) + climate.climate_step["temp"] = sum(climate.climate_raw_step.temp) + climate.climate_step["gradient"] = sum(climate.climate_raw_step.gradient) + climate.climate_step["avg_temp"] = climate.avg_temps + climate.climate_step["avg_gradient"] = climate.avg_gradients + climate.climate_step["ref_hgt"] = metadata(climate.climate_raw_step)["ref_hgt"] end function get_cumulative_climate(climate, gradient_bounds=[-0.009, -0.003], default_grad=-0.0065) - avg_temp = climate.temp.mean() - avg_gradients = climate.gradient.mean() - climate.temp.data = climate.temp.where(climate.temp > 0, 0).data # get PDDs - climate.gradient.data = utils[].clip_array(climate.gradient.data, gradient_bounds[1], gradient_bounds[2]) # Clip gradients within plausible values - attributes = climate.attrs - climate_sum = climate.sum() # get monthly cumulative values - climate_sum = climate_sum.assign(Dict("avg_temp"=>avg_temp)) - climate_sum = climate_sum.assign(Dict("avg_gradient"=>avg_gradients)) - climate_sum.attrs = attributes + avg_temp = mean(climate.temp) + avg_gradient = mean(climate.gradient) + copy_climate = deepcopy(climate) + copy_climate.temp.data .= max.(copy_climate.temp.data, 0.0) # get PDDs + copy_climate.gradient.data .= clamp.(copy_climate.gradient.data, gradient_bounds[1], gradient_bounds[2]) # Clip gradients within plausible values + climate_sum = Dict("temp" => sum(copy_climate.temp), + "prcp" => sum(climate.prcp), + "gradient" => sum(copy_climate.gradient), + "avg_temp" => avg_temp, + "avg_gradient" => avg_gradient, + "ref_hgt" => metadata(climate)["ref_hgt"]) return climate_sum end """ - get_raw_climate_data(gdir, temp_resolution="daily", 
climate="W5E5") + get_raw_climate_data(rgi_path::String) -Downloads the raw W5E5 climate data with a given resolution (daily by default). Returns an xarray Dataset. +Load the netCDF file containing the climate data for that glacier. """ -function get_raw_climate_data(gdir; temp_resolution="daily", climate="W5E5") - MBsandbox[].process_w5e5_data(gdir, climate_type=climate, temporal_resol=temp_resolution) - fpath = gdir.get_filepath("climate_historical", filesuffix="_daily_W5E5") - climate = xr[].open_dataset(fpath) +function get_raw_climate_data(rgi_path::String) + climate = RasterStack(joinpath(rgi_path, "climate_historical_daily_W5E5.nc")) return climate end -# TODO: make snow/rain thresholds customizable +# TODO: make snow/rain thresholds customizable function apply_t_cumul_grad!(climate_2D_step::Climate2Dstep, S::Matrix{F}) where {F <: AbstractFloat} # We apply the gradients to the temperature climate_2D_step.temp .= climate_2D_step.temp .+ climate_2D_step.avg_gradient .* (S .- climate_2D_step.ref_hgt) @@ -104,98 +106,83 @@ function apply_t_cumul_grad!(climate_2D_step::Climate2Dstep, S::Matrix{F}) where climate_2D_step.PDD .= ifelse.(climate_2D_step.PDD .< 0.0, 0.0, climate_2D_step.PDD) # Crop negative PDD values # We adjust the rain/snow fractions with the updated temperature - climate_2D_step.snow .= ifelse.(climate_2D_step.temp .> 0.0, 0.0, climate_2D_step.snow) + climate_2D_step.snow .= ifelse.(climate_2D_step.temp .> 0.0, 0.0, climate_2D_step.snow) climate_2D_step.rain .= ifelse.(climate_2D_step.temp .< 0.0, 0.0, climate_2D_step.rain) end """ - apply_t_grad!(climate, g_dem) + apply_t_grad!(climate::RasterStack, dem::Raster) -Applies temperature gradients to the glacier 2D climate data based on a DEM. +Applies temperature gradients to the glacier 2D climate data based on a DEM. """ -function apply_t_grad!(climate::Py, dem) +function apply_t_grad!(climate::RasterStack, dem::Raster) # We apply the gradients to the temperature - # /!\ AVOID USING `.=` IN JULIA TO ASSIGN. IT'S NOT HANDLED BY XARRAY. USE `=` INSTEAD - climate.temp.data = climate.temp.data + climate.gradient.data .* (mean(dem.data.flatten()) .- climate.ref_hgt) + climate.temp.data .= climate.temp.data .+ climate.gradient.data .* (mean(dem.data[:]) .- metadata(climate)["ref_hgt"]) end """ - downscale_2D_climate(climate, g_dem) + downscale_2D_climate(glacier::Glacier2D) Projects climate data to the glacier matrix by simply copying the closest gridpoint to all matrix gridpoints. -Generates a new xarray Dataset which is returned. +Generates a new RasterStack which is returned. 
""" function downscale_2D_climate!(glacier::Glacier2D) # Update 2D climate structure climate = glacier.climate - FT = eltype(glacier.S[1]) - - climate.climate_2D_step.temp .= pyconvert(FT,climate.climate_step[].avg_temp.data[()]) - climate.climate_2D_step.PDD .= pyconvert(FT,climate.climate_step[].temp.data[()]) - climate.climate_2D_step.snow .= pyconvert(FT,climate.climate_step[].prcp.data[()]) - climate.climate_2D_step.rain .= pyconvert(FT,climate.climate_step[].prcp.data[()]) + climate.climate_2D_step.temp .= climate.climate_step["avg_temp"] + climate.climate_2D_step.PDD .= climate.climate_step["temp"] + climate.climate_2D_step.snow .= climate.climate_step["prcp"] + climate.climate_2D_step.rain .= climate.climate_step["prcp"] # Update gradients - climate.climate_2D_step.gradient[] = pyconvert(FT,climate.climate_step[].gradient.data[()]) - climate.climate_2D_step.avg_gradient[] = pyconvert(FT,climate.climate_step[].avg_gradient.data[()]) + climate.climate_2D_step.gradient = climate.climate_step["gradient"] + climate.climate_2D_step.avg_gradient = climate.climate_step["avg_gradient"] # Apply temperature gradients and compute snow/rain fraction for the selected period - apply_t_cumul_grad!(climate.climate_2D_step, reshape(glacier.S, size(glacier.S))) # Reproject current S with xarray structure + apply_t_cumul_grad!(climate.climate_2D_step, reshape(glacier.S, size(glacier.S))) # Reproject current S with the RasterStack structure end -function downscale_2D_climate(climate_step::Py, glacier::Glacier2D) +function downscale_2D_climate(climate_step::Dict, glacier::Glacier2D) # Create dummy 2D arrays to have a base to apply gradients afterwards FT = typeof(glacier.S[1]) - dummy_grid = ones(size(glacier.S)) - temp_2D = pyconvert(FT, climate_step.avg_temp.data[()]) .* dummy_grid - PDD_2D = pyconvert(FT, climate_step.temp.data[()]) .* dummy_grid - snow_2D = pyconvert(FT, climate_step.prcp.data[()]) .* dummy_grid - rain_2D = pyconvert(FT, climate_step.prcp.data[()]) .* dummy_grid - - # We generate a new dataset with the scaled data + dummy_grid = zeros(size(glacier.S)) + temp_2D = climate_step["avg_temp"] .+ dummy_grid + PDD_2D = climate_step["temp"] .+ dummy_grid + snow_2D = climate_step["prcp"] .+ dummy_grid + rain_2D = climate_step["prcp"] .+ dummy_grid climate_2D_step = Climate2Dstep(temp=temp_2D, - PDD=PDD_2D, - snow=snow_2D, - rain=rain_2D, - gradient=Ref{FT}(pyconvert(FT,climate_step.gradient.data[()])), - avg_gradient=Ref{FT}(pyconvert(FT,climate_step.avg_gradient.data[()])), - x=pyconvert(Vector{FT},glacier.S_coords.x.data), - y=pyconvert(Vector{FT},glacier.S_coords.y.data), - ref_hgt=Ref{FT}(pyconvert(FT,climate_step.ref_hgt))) + PDD=PDD_2D, + snow=snow_2D, + rain=rain_2D, + gradient=Float64(climate_step["gradient"]), + avg_gradient=Float64(climate_step["avg_gradient"]), + x=glacier.S_coords["x"], + y=glacier.S_coords["y"], + ref_hgt=Float64(climate_step["ref_hgt"])) # Apply temperature gradients and compute snow/rain fraction for the selected period apply_t_cumul_grad!(climate_2D_step, reshape(glacier.S, size(glacier.S))) # Reproject current S with xarray structure - + return climate_2D_step end function downscale_2D_climate(glacier::Glacier2D) - climate_2D_step = downscale_2D_climate(glacier.climate.climate_step[], glacier) + climate_2D_step = downscale_2D_climate(glacier.climate.climate_step, glacier) return climate_2D_step end -""" - jldate(pydate, 0) - -Converts a Python date (generally from xarray) to a Julia `Date`. WARNING: it requires Python indices, e.g. 
-0 for the beginning and -1 for the end. -""" -function jldate(pydate, idx) - return Date(pyconvert(Int, pydate.dt.year.data[idx]), pyconvert(Int,pydate.dt.month.data[idx]), pyconvert(Int,pydate.dt.day.data[idx])) -end - """ trim_period(period, climate) Trims a time period based on the time range of a climate series. """ function trim_period(period, climate) - if any(jldate(climate.time, 0) > period[1]) - head = jldate(climate.time, 0) + head = dims(climate, Ti)[begin] + if head > period[begin] period = Date(year(head), 10, 1):Day(1):period[end] # make it a hydrological year end - if any(jldate(climate.time, -1) > period[end]) - tail = jldate(climate.time, -1) + tail = dims(climate, Ti)[end] + if tail > period[end] period = period[1]:Day(1):Date(year(tail), 9, 30) # make it a hydrological year end @@ -209,21 +196,22 @@ function partial_year(period::Type{<:Period}, float) partial = period(round(Dates.value(year) * Δ)) year_start + partial end -partial_year(float) = partial_year(Day, float) +partial_year(float) = partial_year(Day, float) -function get_longterm_temps(gdir::Py, tspan) - climate = xr[].open_dataset(joinpath(gdir.dir, "raw_climate_$tspan.nc")) # load only once at the beginning - dem = rioxarray[].open_rasterio(gdir.get_filepath("dem")) - apply_t_grad!(climate, dem) - longterm_temps = climate.groupby("time.year").mean().temp.data +function get_longterm_temps(rgi_id::String, params::Parameters) + rgi_path = joinpath(prepro_dir, params.simulation.rgi_paths[rgi_id]) + glacier_gd = RasterStack(joinpath(rgi_path, "gridded_data.nc")) + climate = RasterStack(joinpath(rgi_path, "raw_climate_$(params.simulation.tspan).nc")) + apply_t_grad!(climate, glacier_gd.topo) + longterm_temps = mean.(groupby(climate.temp, Ti=>year)).data return longterm_temps end -function get_longterm_temps(gdir::Py, climate::Py) - dem = rioxarray[].open_rasterio(gdir.get_filepath("dem")) - apply_t_grad!(climate, dem) - longterm_temps = climate.groupby("time.year").mean().temp.data +function get_longterm_temps(rgi_id::String, params::Parameters, climate::RasterStack) + glacier_gd = RasterStack(joinpath(prepro_dir, params.simulation.rgi_paths[rgi_id], "gridded_data.nc")) + apply_t_grad!(climate, glacier_gd.topo) + longterm_temps = mean.(groupby(climate.temp, Ti=>year)).data return longterm_temps end diff --git a/src/glaciers/glacier/Glacier1D.jl b/src/glaciers/glacier/Glacier1D.jl index e98f51d..91bb49f 100644 --- a/src/glaciers/glacier/Glacier1D.jl +++ b/src/glaciers/glacier/Glacier1D.jl @@ -1,13 +1,12 @@ export Glacier1D, Climate1D, AbstractGlacier -abstract type AbstractGlacier end +abstract type AbstractGlacier end include("../climate/Climate1D.jl") mutable struct Glacier1D{F <: AbstractFloat, I <: Integer} <: AbstractGlacier rgi_id::Union{String, Nothing} - gdir::Union{Py, Nothing} climate::Union{Climate1D, Nothing} H₀::Union{Vector{F}, Nothing} S::Union{Vector{F}, Nothing} @@ -20,7 +19,7 @@ mutable struct Glacier1D{F <: AbstractFloat, I <: Integer} <: AbstractGlacier λ::Union{Vector{F}, Nothing} slope::Union{Vector{F}, Nothing} dist_border::Union{Vector{F}, Nothing} - S_coords::Union{Py, Nothing} + S_coords::Union{Dict{String, Vector{Float64}}, Nothing} Δx::Union{F, Nothing} Δy::Union{F, Nothing} nx::Union{I, Nothing} @@ -30,26 +29,29 @@ end """ function Glacier1D(; rgi_id::Union{String, Nothing} = nothing, - gdir::Union{Py, Nothing} = nothing, climate::Union{Climate1D, Nothing} = nothing, H₀::Union{Vector{F}, Nothing} = nothing, S::Union{Vector{F}, Nothing} = nothing, B::Union{Vector{F}, Nothing} = nothing, 
V::Union{Vector{F}, Nothing}= nothing, + A::Union{F, Nothing} = nothing, + C::Union{F, Nothing} = nothing, + n::Union{F, Nothing} = nothing, + w₀::Union{Vector{F}, Nothing} = nothing, + λ::Union{Vector{F}, Nothing} = nothing, slope::Union{Vector{F}, Nothing} = nothing, dist_border::Union{Vector{F}, Nothing} = nothing, - S_coords::Union{Py, Nothing} = nothing, + S_coords::Union{Dict{String, Vector{Float64}}, Nothing} = nothing, Δx::Union{F, Nothing} = nothing, Δy::Union{F, Nothing} = nothing, nx::Union{I, Nothing} = nothing, ny::Union{I, Nothing} = nothing - ) where {F <: AbstractFloat, I <: Integer} + ) where {F <: AbstractFloat, I <: Integer} Constructor for empty 2D Glacier object. """ function Glacier1D(; rgi_id::Union{String, Nothing} = nothing, - gdir::Union{Py, Nothing} = nothing, climate::Union{Climate1D, Nothing} = nothing, H₀::Union{Vector{F}, Nothing} = nothing, S::Union{Vector{F}, Nothing} = nothing, @@ -62,12 +64,12 @@ function Glacier1D(; λ::Union{Vector{F}, Nothing} = nothing, slope::Union{Vector{F}, Nothing} = nothing, dist_border::Union{Vector{F}, Nothing} = nothing, - S_coords::Union{Py, Nothing} = nothing, + S_coords::Union{Dict{String, Vector{Float64}}, Nothing} = nothing, Δx::Union{F, Nothing} = nothing, Δy::Union{F, Nothing} = nothing, nx::Union{I, Nothing} = nothing, ny::Union{I, Nothing} = nothing - ) where {F <: AbstractFloat, I <: Integer} + ) where {F <: AbstractFloat, I <: Integer} # Define default float and integer type for constructor ft = Float64 @@ -79,7 +81,7 @@ end ################### UTILS ##################### ############################################### -Base.:(==)(a::Glacier1D, b::Glacier1D) = a.rgi_id == b.rgi_id && a.gdir == b.gdir && a.climate == b.climate && +Base.:(==)(a::Glacier1D, b::Glacier1D) = a.rgi_id == b.rgi_id && a.gdir == b.gdir && a.climate == b.climate && a.H₀ == b.H₀ && a.S == b.S && a.B == b.B && a.V == b.V && a.A == b.A && a.C == b.C && a.n == b.n && a.w₀ == b.w₀ && a.λ == b.λ && a.slope == b.slope && a.dist_border == b.dist_border && a.rgi_id == b.rgi_id && diff --git a/src/glaciers/glacier/Glacier2D.jl b/src/glaciers/glacier/Glacier2D.jl index 9c39483..1bc1035 100644 --- a/src/glaciers/glacier/Glacier2D.jl +++ b/src/glaciers/glacier/Glacier2D.jl @@ -1,13 +1,12 @@ export Glacier2D, Climate2D -abstract type AbstractGlacier end +abstract type AbstractGlacier end include("../climate/Climate2D.jl") mutable struct Glacier2D{F <: AbstractFloat, I <: Integer} <: AbstractGlacier rgi_id::Union{String, Nothing} - gdir::Union{Py, Nothing} climate::Union{Climate2D, Nothing} H₀::Union{Matrix{F}, Nothing} H_glathida::Union{Matrix{F}, Nothing} @@ -21,7 +20,7 @@ mutable struct Glacier2D{F <: AbstractFloat, I <: Integer} <: AbstractGlacier n::Union{F, Nothing} slope::Union{Matrix{F}, Nothing} dist_border::Union{Matrix{F}, Nothing} - S_coords::Union{Py, Nothing} + S_coords::Union{Dict{String, Vector{Float64}}, Nothing} Δx::Union{F, Nothing} Δy::Union{F, Nothing} nx::Union{I, Nothing} @@ -33,27 +32,32 @@ end """ function Glacier2D(; rgi_id::Union{String, Nothing} = nothing, - gdir::Union{Py, Nothing} = nothing, climate::Union{Climate2D, Nothing} = nothing, H₀::Union{Matrix{F}, Nothing} = nothing, - H_glathida::Union{Matrix{F}, Nothing}, + H_glathida::Union{Matrix{F}, Nothing} = nothing, S::Union{Matrix{F}, Nothing} = nothing, B::Union{Matrix{F}, Nothing} = nothing, V::Union{Matrix{F}, Nothing}= nothing, + Vx::Union{Matrix{F}, Nothing}= nothing, + Vy::Union{Matrix{F}, Nothing}= nothing, + A::Union{F, Nothing} = nothing, + C::Union{F, Nothing} = 
nothing, + n::Union{F, Nothing} = nothing, slope::Union{Matrix{F}, Nothing} = nothing, dist_border::Union{Matrix{F}, Nothing} = nothing, - S_coords::Union{Py, Nothing} = nothing, + S_coords::Union{Dict{String, Vector{Float64}}, Nothing} = nothing, Δx::Union{F, Nothing} = nothing, Δy::Union{F, Nothing} = nothing, nx::Union{I, Nothing} = nothing, - ny::Union{I, Nothing} = nothing - ) where {F <: AbstractFloat, I <: Integer} + ny::Union{I, Nothing} = nothing, + cenlon::Union{F, Nothing} = nothing, + cenlat::Union{F, Nothing} = nothing + ) where {F <: AbstractFloat, I <: Integer} Constructor for empty 2D Glacier object. """ function Glacier2D(; rgi_id::Union{String, Nothing} = nothing, - gdir::Union{Py, Nothing} = nothing, climate::Union{Climate2D, Nothing} = nothing, H₀::Union{Matrix{F}, Nothing} = nothing, H_glathida::Union{Matrix{F}, Nothing} = nothing, @@ -67,20 +71,20 @@ function Glacier2D(; n::Union{F, Nothing} = nothing, slope::Union{Matrix{F}, Nothing} = nothing, dist_border::Union{Matrix{F}, Nothing} = nothing, - S_coords::Union{Py, Nothing} = nothing, + S_coords::Union{Dict{String, Vector{Float64}}, Nothing} = nothing, Δx::Union{F, Nothing} = nothing, Δy::Union{F, Nothing} = nothing, nx::Union{I, Nothing} = nothing, ny::Union{I, Nothing} = nothing, cenlon::Union{F, Nothing} = nothing, cenlat::Union{F, Nothing} = nothing - ) where {F <: AbstractFloat, I <: Integer} + ) where {F <: AbstractFloat, I <: Integer} # Define default float and integer type for constructor ft = typeof(Δx) it = typeof(nx) - - return Glacier2D{ft,it}(rgi_id, gdir, climate, H₀, H_glathida, S, B, V, Vx, Vy, A, C, n, slope, dist_border, S_coords, Δx, Δy, nx, ny, cenlon, cenlat) + + return Glacier2D{ft,it}(rgi_id, climate, H₀, H_glathida, S, B, V, Vx, Vy, A, C, n, slope, dist_border, S_coords, Δx, Δy, nx, ny, cenlon, cenlat) end ############################################### @@ -88,21 +92,21 @@ end ############################################### -Base.:(==)(a::Glacier2D, b::Glacier2D) = a.rgi_id == b.rgi_id && a.gdir == b.gdir && a.climate == b.climate && +Base.:(==)(a::Glacier2D, b::Glacier2D) = a.rgi_id == b.rgi_id && a.climate == b.climate && a.H₀ == b.H₀ && a.H_glathida == b.H_glathida && a.S == b.S && a.B == b.B && a.V == b.V && - a.A == b.A && a.C == b.C && a.n == b.n && - a.slope == b.slope && a.dist_border == b.dist_border && + a.A == b.A && a.C == b.C && a.n == b.n && + a.slope == b.slope && a.dist_border == b.dist_border && a.S_coords == b.S_coords && a.Δx == b.Δx && a.Δy == b.Δy && a.nx == b.nx && a.ny == b.ny && a.cenlon == b.cenlon && a.cenlat == b.cenlat -Base.:(≈)(a::Glacier2D, b::Glacier2D) = a.rgi_id == b.rgi_id && a.gdir == b.gdir && a.climate == b.climate && - safe_approx(a.H₀, b.H₀) && safe_approx(a.H_glathida, b.H_glathida) && +Base.:(≈)(a::Glacier2D, b::Glacier2D) = a.rgi_id == b.rgi_id && a.climate == b.climate && + safe_approx(a.H₀, b.H₀) && safe_approx(a.H_glathida, b.H_glathida) && safe_approx(a.S, b.S) && safe_approx(a.B, b.B) && safe_approx(a.V, b.V) && safe_approx(a.A, b.A) && safe_approx(a.C, b.C) && safe_approx(a.n, b.n) && isapprox(a.slope, b.slope; rtol=1e-3) && safe_approx(a.dist_border, b.dist_border) && - a.S_coords == b.S_coords && safe_approx(a.Δx, b.Δx) && safe_approx(a.Δy, b.Δy) && - safe_approx(a.nx, b.nx) && safe_approx(a.ny, b.ny) && + safe_approx(a.S_coords, b.S_coords) && safe_approx(a.Δx, b.Δx) && safe_approx(a.Δy, b.Δy) && + safe_approx(a.nx, b.nx) && safe_approx(a.ny, b.ny) && safe_approx(a.cenlon, b.cenlon) && safe_approx(a.cenlat, b.cenlat) 
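As a minimal sketch of the reworked, Python-free API (editor's illustration, not part of the patch; the RGI ID and all values are made up), a Glacier2D can now be built with a plain Dict for S_coords and compared using the new field-wise equality added in this diff:

# Hypothetical usage example; assumes Sleipnir is loaded and relies only on the
# keyword constructor and Base.:(==) defined above.
S  = [2400.0 2550.0; 2700.0 2900.0]   # surface elevation (m), made-up values
H₀ = [10.0 0.0; 35.0 20.0]            # initial ice thickness (m), made-up values
glacier = Glacier2D(rgi_id = "RGI60-11.00001",
                    H₀ = H₀, S = S, B = S .- H₀,
                    S_coords = Dict("x" => [0.0, 25.0], "y" => [0.0, 25.0]),
                    Δx = 25.0, Δy = 25.0, nx = 2, ny = 2)
glacier == deepcopy(glacier)          # true: equality is now field-wise, no gdir involved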
include("glacier2D_utils.jl") diff --git a/src/glaciers/glacier/glacier2D_utils.jl b/src/glaciers/glacier/glacier2D_utils.jl index 046fb6a..2a11c5b 100644 --- a/src/glaciers/glacier/glacier2D_utils.jl +++ b/src/glaciers/glacier/glacier2D_utils.jl @@ -6,14 +6,13 @@ export initialize_glaciers ############################################### """ - initialize_glaciers(rgi_ids::Vector{String}, params::Parameters; velocities=true) + initialize_glaciers(rgi_ids::Vector{String}, params::Parameters; test=false) + +Initialize multiple `Glacier`s based on a list of RGI IDs and on parameters. -Initialize multiple `Glacier`s based on a list of RGI IDs, a º span for a simulation and step. - Keyword arguments ================= - `rgi_ids`: List of RGI IDs of glaciers - - `tspan`: Tuple specifying the initial and final year of the simulation - `params`: `Parameters` object to be passed """ function initialize_glaciers(rgi_ids::Vector{String}, params::Parameters; test=false) @@ -28,26 +27,22 @@ function initialize_glaciers(rgi_ids::Vector{String}, params::Parameters; test=f missing_glaciers = Vector([]) jldsave(joinpath(params.simulation.working_dir, "data/missing_glaciers.jld2"); missing_glaciers) end - filter_missing_glaciers!(pyconvert(Vector{String},rgi_ids), params) + filter_missing_glaciers!(rgi_ids, params) - # Initialize glacier directories - gdirs = init_gdirs(rgi_ids, params; velocities=params.simulation.velocities) - # Generate raw climate data if necessary if params.simulation.test_mode - map((gdir) -> generate_raw_climate_files(gdir, params.simulation.tspan), PyList(gdirs)) # avoid GitHub CI issue + map((rgi_id) -> generate_raw_climate_files(rgi_id, params.simulation), rgi_ids) # avoid GitHub CI issue else - pmap((gdir) -> generate_raw_climate_files(gdir, params.simulation.tspan), PyList(gdirs)) + pmap((rgi_id) -> generate_raw_climate_files(rgi_id, params.simulation), rgi_ids) end - glaciers = pmap((gdir) -> initialize_glacier(gdir, params; smoothing=false, test=test), PyList(gdirs)) + glaciers = pmap((rgi_id) -> initialize_glacier(rgi_id, params; smoothing=false, test=test), rgi_ids) if params.simulation.use_glathida_data == true - data_glathida, glathida_rgi_ids = get_glathida_path_and_IDs() # Obtain H_glathida values for the valid RGI IDs - H_glathida_values, valid_gdirs = get_glathida!(data_glathida, PyList(gdirs), params) - valid_rgi_ids = [gdir.rgi_id for gdir in valid_gdirs] + H_glathida_values, valid_glaciers = get_glathida!(glaciers, params) + valid_rgi_ids = [glacier.rgi_id for glacier in valid_glaciers] if isempty(valid_rgi_ids) error("None of the provided RGI IDs have GlaThiDa.") @@ -73,27 +68,26 @@ end """ - initialize_glacier(gdir::Py, tspan, step; smoothing=false, velocities=true) + initialize_glacier(rgi_id::String, parameters::Parameters; smoothing=false, velocities=true) + +Initialize a single `Glacier`s, including its `Climate`, based on a `rgi_id` and timestepping arguments. -Initialize a single `Glacier`s, including its `Climate`, based on a `gdir` and timestepping arguments. - Keyword arguments ================= - - `gdir`: Glacier directory - - `tspan`: Tuple specifying the initial and final year of the simulation - - `step`: Step in years for the surface mass balance processing + - `rgi_id`: Glacier RGI ID + - `parameters`: Parameters including the physical and simulation ones - `smoothing` Flag determining if smoothing needs to be applied to the surface elevation and ice thickness. - `velocities` Flag determining if the ice surface velocities need to be retrieved. 
""" -function initialize_glacier(gdir::Py, parameters; smoothing=false, test=false) +function initialize_glacier(rgi_id::String, parameters::Parameters; smoothing=false, test=false) # Initialize glacier initial topography - glacier = initialize_glacier_data(gdir, parameters; smoothing=smoothing, test=test) + glacier = initialize_glacier_data(rgi_id, parameters; smoothing=smoothing, test=test) # Initialize glacier climate initialize_glacier_climate!(glacier, parameters) if test - glacier.gdir = nothing + glacier.rgi_id = nothing # not sure of that line glacier.S_coords = nothing end @@ -101,52 +95,48 @@ function initialize_glacier(gdir::Py, parameters; smoothing=false, test=false) end """ - initialize_glacier(gdir::Py; smoothing=false, velocities=true) + initialize_glacier(rgi_id::String, params::Parameters; smoothing=false, velocities=true) Retrieves the initial glacier geometry (bedrock + ice thickness) for a glacier with other necessary data (e.g. grid size and ice surface velocities). """ -function initialize_glacier_data(gdir::Py, params::Parameters; smoothing=false, test=false) +function initialize_glacier_data(rgi_id::String, params::Parameters; smoothing=false, test=false) # Load glacier gridded data F = params.simulation.float_type I = params.simulation.int_type - glacier_gd = xr[].open_dataset(gdir.get_filepath("gridded_data")) + rgi_path = joinpath(prepro_dir, params.simulation.rgi_paths[rgi_id]) + glacier_gd = RasterStack(joinpath(rgi_path, "gridded_data.nc")) + glacier_grid = JSON.parsefile(joinpath(rgi_path, "glacier_grid.json")) # println("Using $ice_thickness_source for initial state") # Retrieve initial conditions from OGGM # initial ice thickness conditions for forward model - if params.OGGM.ice_thickness_source == "Millan22" && params.simulation.velocities - H₀ = F.(ifelse.(pyconvert(Matrix,glacier_gd.glacier_mask.data) .== 1, pyconvert(Matrix,glacier_gd.millan_ice_thickness.data), 0.0)) - elseif params.OGGM.ice_thickness_source == "Farinotti19" - H₀ = F.(ifelse.(pyconvert(Matrix,glacier_gd.glacier_mask.data) .== 1, pyconvert(Matrix,glacier_gd.consensus_ice_thickness.data), 0.0)) + if params.simulation.ice_thickness_source == "Millan22" && params.simulation.velocities + H₀ = F.(ifelse.(glacier_gd.glacier_mask.data .== 1, glacier_gd.millan_ice_thickness.data, 0.0)) + elseif params.simulation.ice_thickness_source == "Farinotti19" + H₀ = F.(ifelse.(glacier_gd.glacier_mask.data .== 1, glacier_gd.consensus_ice_thickness.data, 0.0)) end fillNaN!(H₀) # Fill NaNs with 0s to have real boundary conditions - if smoothing + if smoothing println("Smoothing is being applied to initial condition.") smooth!(H₀) # Smooth initial ice thickness to help the solver end - # Create path for simulation results - gdir_path = dirname(pyconvert(String, gdir.get_filepath("dem"))) - if !isdir(gdir_path) - mkdir(gdir_path) - end - try # We filter glacier borders in high elevations to avoid overflow problems - dist_border = pyconvert(Matrix{F}, glacier_gd.dis_from_border.data) + dist_border::Matrix{Float64} = glacier_gd.dis_from_border.data # H_mask = (dist_border .< 20.0) .&& (S .> maximum(S)*0.7) # H₀[H_mask] .= 0.0 - B = pyconvert(Matrix{F}, glacier_gd.topo.data) .- H₀ # bedrock - S_coords = rioxarray[].open_rasterio(gdir.get_filepath("dem")) - #S = pyconvert(Matrix{F}, glacier_gd.topo.data) - S = pyconvert(Matrix{F}, S_coords.values[0]) # surface elevation - #smooth!(S) + B = glacier_gd.topo.data .- H₀ # bedrock + S_coords = Dict{String,Vector{Float64}}("x"=> dims(glacier_gd, 1).val, "y"=> 
dims(glacier_gd, 2).val) + S::Matrix{Float64} = glacier_gd.topo.data + #smooth!(S) + if params.simulation.velocities - V = ifelse.(pyconvert(Matrix{F}, glacier_gd.glacier_mask.data) .== 1, pyconvert(Matrix{F}, glacier_gd.millan_v.data), 0.0) - Vx = ifelse.(pyconvert(Matrix{F}, glacier_gd.glacier_mask.data) .== 1, pyconvert(Matrix{F}, glacier_gd.millan_vx.data), 0.0) - Vy = ifelse.(pyconvert(Matrix{F}, glacier_gd.glacier_mask.data) .== 1, pyconvert(Matrix{F}, glacier_gd.millan_vy.data), 0.0) + V::Matrix{Float64} = ifelse.(glacier_gd.glacier_mask.data .== 1, glacier_gd.millan_v.data, 0.0) + Vx::Matrix{Float64} = ifelse.(glacier_gd.glacier_mask.data .== 1, glacier_gd.millan_vx.data, 0.0) + Vy::Matrix{Float64} = ifelse.(glacier_gd.glacier_mask.data .== 1, glacier_gd.millan_vy.data, 0.0) fillNaN!(V) fillNaN!(Vx) fillNaN!(Vy) @@ -155,164 +145,72 @@ function initialize_glacier_data(gdir::Py, params::Parameters; smoothing=false, Vx = zeros(F, size(H₀)) Vy = zeros(F, size(H₀)) end - nx = pyconvert(I, glacier_gd.y.size) # glacier extent - ny = pyconvert(I, glacier_gd.x.size) # really weird, but this is inversed - Δx = abs(pyconvert(F, gdir.grid.dx)) - Δy = abs(pyconvert(F, gdir.grid.dy)) - slope = pyconvert(Matrix{F},glacier_gd.slope.data) + nx = glacier_grid["nxny"][1] + ny = glacier_grid["nxny"][2] + Δx = abs.(glacier_grid["dxdy"][1]) + Δy = abs.(glacier_grid["dxdy"][2]) + slope::Matrix{Float64} = glacier_gd.slope.data - glacier_gd.close() # Release any resources linked to this object # We initialize the Glacier with all the initial topographical - glacier = Glacier2D(rgi_id = pyconvert(String,gdir.rgi_id), gdir = gdir, + glacier = Glacier2D(rgi_id = rgi_id, climate=nothing, H₀ = H₀, S = S, B = B, V = V, Vx = Vx, Vy = Vy, A = 4e-17, C = 0.0, n = 3.0, slope = slope, dist_border = dist_border, S_coords = S_coords, Δx=Δx, Δy=Δy, nx=nx, ny=ny, - cenlon = pyconvert(F,gdir.cenlon), cenlat = pyconvert(F,gdir.cenlat)) + cenlon = glacier_grid["x0y0"][1] , cenlat = glacier_grid["x0y0"][2]) return glacier catch error @show error missing_glaciers = load(joinpath(params.simulation.working_dir, "data/missing_glaciers.jld2"))["missing_glaciers"] - push!(missing_glaciers, pyconvert(String, gdir.rgi_id)) + push!(missing_glaciers, rgi_id) jldsave(joinpath(params.simulation.working_dir, "data/missing_glaciers.jld2"); missing_glaciers) - glacier_gd.close() # Release any resources linked to this object - @warn "Glacier without data: $(gdir.rgi_id). Updating list of missing glaciers. Please try again." - end -end - -""" - init_gdirs(rgi_ids; force=false) - -Initializes Glacier Directories using OGGM. Wrapper function calling `init_gdirs_scratch(rgi_ids)`. -""" -function init_gdirs(rgi_ids::Vector{String}, params::Parameters; velocities=true) - # Try to retrieve glacier gdirs if they are available - filter_missing_glaciers!(rgi_ids, params) - try - gdirs = workflow[].init_glacier_directories(rgi_ids) - filter_missing_glaciers!(PyList(gdirs), params) - - # Set different surface topography source if specified - if params.OGGM.DEM_source != "Default" - for gdir in gdirs - tasks[].define_glacier_region(gdir, source = params.OGGM.DEM_source) - end - end - - return gdirs - catch - @warn "Cannot retrieve gdirs from disk!" 
- println("Generating gdirs from scratch...") - # Generate all gdirs if needed - gdirs = init_gdirs_scratch(rgi_ids, params; velocities = velocities) - # Check which gdirs errored in the tasks[] (useful to filter those gdirs) - filter_missing_glaciers!(PyList(gdirs), params) - return gdirs - end -end - -""" - init_gdirs_scratch(rgi_ids) - -Initializes Glacier Directories from scratch using OGGM. -""" -function init_gdirs_scratch(rgi_ids::Vector{String}, params::Parameters; velocities=true)::Py - # Check if some of the gdirs is missing files - @infiltrate - gdirs = workflow[].init_glacier_directories(pyjl(rgi_ids), prepro_base_url=params.OGGM.base_url, - from_prepro_level=2, prepro_border=10, - reset=true, force=true) - - # Set different surface topography source if specified - if params.OGGM.DEM_source != "Default" - for gdir in PyList(gdirs) - tasks[].define_glacier_region(gdir, source = params.OGGM.DEM_source) - end - end - - - if velocities - list_talks = [ - # tasks[].compute_centerlines, - # tasks[].initialize_flowlines, - # tasks[].compute_downstream_line, - # tasks[].catchment_area, - # tasks[].process_dem, - tasks[].gridded_attributes, - tasks[].glacier_masks, - # tasks[].gridded_mb_attributes, - # tasks[].prepare_for_inversion, # This is a preprocessing task - # tasks[].mass_conservation_inversion, # This gdirsdoes the actual job - # tasks[].filter_inversion_output, # This smoothes the thicknesses at the tongue a little - # tasks[].distribute_thickness_per_altitude, - bedtopo[].add_consensus_thickness, # Use consensus ice thicknesses from Farinotti et al. (2019) - # tasks[].get_topo_predictors, - millan22[].thickness_to_gdir, - millan22[].velocity_to_gdir - ] - else - list_talks = [ - tasks[].gridded_attributes, - tasks[].glacier_masks, - bedtopo[].add_consensus_thickness # Use consensus ice thicknesses from Farinotti et al. (2019) - ] - end - - for task in list_talks - # The order matters! - workflow[].execute_entity_task(task, gdirs) + @warn "Glacier without data: $rgi_id. Updating list of missing glaciers. Please try again." end - GC.gc() - - return gdirs end # [Begin] Glathida Utilities -function get_glathida!(gtd_file, gdirs::Vector{Py}, params; force=false) - glathida = pd[].HDFStore(gtd_file) - gtd_grids = map(gdir -> get_glathida_glacier(gdir, glathida, force), gdirs) +function get_glathida!(glaciers::Vector{Glacier2D}, params::Parameters; force=false) + gtd_grids = pmap(glacier -> get_glathida_glacier(glacier, params, force), glaciers) # Update missing_glaciers list before removing them missing_glaciers = load(joinpath(params.simulation.working_dir, "data/missing_glaciers.jld2"))["missing_glaciers"] - for (gtd_grid, gdir) in zip(gtd_grids, gdirs) - if (length(gtd_grid[gtd_grid .!= 0.0]) == 0) && all(gdir.rgi_id .!= missing_glaciers) - push!(missing_glaciers, gdir.rgi_id) - @info "Glacier with all data at 0: $(gdir.rgi_id). Updating list of missing glaciers..." + for (gtd_grid, glacier) in zip(gtd_grids, glaciers) + if (length(gtd_grid[gtd_grid .!= 0.0]) == 0) && all(glacier.rgi_id .!= missing_glaciers) + push!(missing_glaciers, glacier.rgi_id) + @info "Glacier with all data at 0: $(glacier.rgi_id). Updating list of missing glaciers..." 
end end jldsave(joinpath(params.simulation.working_dir, "data/missing_glaciers.jld2"); missing_glaciers) - # Apply deletion to both gtd_grids and gdirs using the same set of indices + # Apply deletion to both gtd_grids and glaciers using the same set of indices indices_to_remove = findall(x -> length(x[x .!= 0.0]) == 0, gtd_grids) deleteat!(gtd_grids, indices_to_remove) - deleteat!(gdirs, indices_to_remove) + deleteat!(glaciers, indices_to_remove) - - return gtd_grids, gdirs + return gtd_grids, glaciers end -function get_glathida_glacier(gdir, glathida, force) - gtd_path = joinpath(gdir.dir, "glathida.h5") +function get_glathida_glacier(glacier::Glacier2D, params::Parameters, force) + rgi_path = joinpath(prepro_dir, params.simulation.rgi_paths[glacier.rgi_id]) + gtd_path = joinpath(rgi_path, "glathida.h5") if isfile(gtd_path) && !force gtd_grid = h5read(gtd_path, "gtd_grid") else - df_gtd = glathida[gdir.rgi_id] - jj, ii = gdir.grid.transform(df_gtd["POINT_LON"], df_gtd["POINT_LAT"], crs=salem[].wgs84, nearest=true) - - gtd_grid = zeros((gdir.grid.ny,gdir.grid.nx)) - for (thick, i, j) in zip(df_gtd["THICKNESS"], ii, jj) - if gtd_grid[i,j] != 0.0 - gtd_grid[i,j] = (gtd_grid[i,j] + thick)/2.0 # average - else - gtd_grid[i,j] = thick - end + glathida = CSV.File(joinpath(rgi_path, "glathida.csv")) + gtd_grid = zeros(size(glacier.H₀)) + count = zeros(size(glacier.H₀)) + for (thick, i, j) in zip(glathida["elevation"], glathida["i_grid"], glathida["j_grid"]) + count[i,j] += 1 + gtd_grid[i,j] += thick end - - # Save file - h5open(joinpath(gdir.dir, "glathida.h5"), "w") do file - write(file, "gtd_grid", gtd_grid) + + gtd_grid .= ifelse.(count .> 0, gtd_grid ./ count, 0.0) + + # Save file + h5open(joinpath(prepro_dir, params.simulation.rgi_paths[glacier.rgi_id], "glathida.h5"), "w") do file + write(file, "gtd_grid", gtd_grid) end end return gtd_grid @@ -320,32 +218,28 @@ end function get_glathida_path_and_IDs() gtd_file = Downloads.download("https://cluster.klima.uni-bremen.de/~oggm/glathida/glathida-v3.1.0/data/TTT_per_rgi_id.h5") - glathida = pd[].HDFStore(gtd_file) - rgi_ids = glathida.keys() + glathida = h5open(gtd_file, "r") + rgi_ids = keys(glathida) rgi_ids = String[id[2:end] for id in rgi_ids] return gtd_file, rgi_ids end # [End] Glathida Utilities -function filter_missing_glaciers!(gdirs::PyList, params::Parameters) - task_log = global_tasks[].compile_task_log(gdirs, - task_names=["gridded_attributes", "velocity_to_gdir", "thickness_to_gdir"]) - - task_log.to_csv(joinpath(params.simulation.working_dir, "task_log.csv")) - SF = Union{String, Float64} +function filter_missing_glaciers!(glaciers::Vector{Glacier2D}, params::Parameters) + task_log = CSV.File(joinpath(params.simulation.working_dir, "task_log.csv")) if params.simulation.velocities & params.simulation.use_glathida_data - glacier_filter = (pyconvert(Vector{SF}, task_log.velocity_to_gdir) .!= "SUCCESS") .&& (pyconvert(Vector{SF}, task_log.gridded_attributes) .!= "SUCCESS") .&& (pyconvert(Vector{SF}, task_log.thickness_to_gdir) .!= "SUCCESS") + glacier_filter = (task_log.velocity_to_gdir .!= "SUCCESS") .&& (task_log.gridded_attributes .!= "SUCCESS") .&& (task_log.thickness_to_gdir .!= "SUCCESS") elseif params.simulation.use_glathida_data - glacier_filter = (pyconvert(Vector{SF}, task_log.gridded_attributes) .!= "SUCCESS") .&& (pyconvert(Vector{SF}, task_log.thickness_to_gdir) .!= "SUCCESS") + glacier_filter = (task_log.gridded_attributes .!= "SUCCESS") .&& (task_log.thickness_to_gdir .!= "SUCCESS") else - glacier_filter = (pyconvert(Vector{SF}, 
task_log.gridded_attributes) .!= "SUCCESS") + glacier_filter = (task_log.gridded_attributes .!= "SUCCESS") end glacier_ids = Vector{String}([]) - for id in PyList(task_log.index) - push!(glacier_ids, pyconvert(String,id)) + for id in task_log["index"] + push!(glacier_ids, id) end missing_glaciers = glacier_ids[glacier_filter] @@ -362,7 +256,7 @@ function filter_missing_glaciers!(gdirs::PyList, params::Parameters) end for id in missing_glaciers - deleteat!(pyconvert(Vector,gdirs), findall(x->pyconvert(String,x.rgi_id)==id, pyconvert(Vector,gdirs))) + deleteat!(glaciers, findall(x->x.rgi_id==id, glaciers)) end # Save missing glaciers in a file @@ -372,20 +266,15 @@ function filter_missing_glaciers!(gdirs::PyList, params::Parameters) return missing_glaciers end -function filter_missing_glaciers!(rgi_ids::Vector{String}, params::Parameters) +function filter_missing_glaciers!(rgi_ids::Vector{String}, params::Parameters) # TODO: see if this is necessary, otherwise remove # Check which glaciers we can actually process - rgi_stats = pd[].read_csv(utils[].file_downloader("https://cluster.klima.uni-bremen.de/~oggm/rgi/rgi62_stats.csv"), index_col=0) - # rgi_stats = rgi_stats.loc[rgi_ids] - - # if any(rgi_stats.Connect .== 2) - # @warn "You have some level 2 glaciers... Removing..." - # rgi_ids = [rgi_stats.loc[rgi_stats.Connect .!= 2].index] - # end + pathCsv = Downloads.download("https://cluster.klima.uni-bremen.de/~oggm/rgi/rgi62_stats.csv") + rgi_stats = CSV.File(pathCsv) - indices = [rgi_stats.index...] + # Remove level 2 glaciers for rgi_id in rgi_ids - if PyList(rgi_stats.Connect.values[indices .== rgi_id]) == 2 + if rgi_stats.Connect[rgi_stats.RGIId .== rgi_id] == 2 @warn "Filtering glacier $rgi_id..." deleteat!(rgi_ids, rgi_ids .== rgi_id) end @@ -401,7 +290,7 @@ function filter_missing_glaciers!(rgi_ids::Vector{String}, params::Parameters) catch error @warn "$error: No missing_glaciers.jld file available. Skipping..." end - + end """ diff --git a/src/parameters/OGGMparameters.jl b/src/parameters/OGGMparameters.jl deleted file mode 100644 index e450bf0..0000000 --- a/src/parameters/OGGMparameters.jl +++ /dev/null @@ -1,93 +0,0 @@ - -export oggm_config - -struct OGGMparameters <: AbstractParameters - working_dir::String - paths::Union{PyDict, Nothing} - params::Union{PyDict, Nothing} - multiprocessing::Bool - workers::Int64 - ice_thickness_source::String - DEM_source::String - base_url::String -end - -""" - OGGMparameters(; - working_dir::String = joinpath(homedir(), "OGGM/OGGM_data"), - paths::Union{PyDict, Nothing} = nothing, - paths::Union{PyDict, Nothing} = nothing, - multiprocessing::Bool = false, - workers::Int64 = 1, - base_url::String = "https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L1-L2_files/elev_bands/" - ) -Initializes OGGM and it configures its parameters. -Keyword arguments -================= - - `working_dir`: Working directory were all the files will be stored. - - `paths`: Dictionary for OGGM-related paths. - - `params`: Dictionary for OGGM-related parameters. - - `multiprocessing`: Determines if multiprocessing is used for OGGM. - - `workers`: How many workers are to be used for OGGM multiprocessing. - - `ice_thickness_source`: Source for the ice thickness dataset. Either `Millan22` of `Farinotti19`. - - `base_url`: Base URL to download all OGGM data. 
-""" -function OGGMparameters(; - working_dir::String = joinpath(homedir(), "OGGM/OGGM_data"), - paths::Union{PyDict, Nothing} = nothing, - params::Union{PyDict, Nothing} = nothing, - multiprocessing::Bool = false, - workers::Int64 = 1, - ice_thickness_source::String = "Farinotti19", - DEM_source::String = "Default", - base_url::String = "https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L1-L2_files/elev_bands/", - test = false - ) - - @assert ((ice_thickness_source == "Millan22") || (ice_thickness_source == "Farinotti19")) "Wrong ice thickness source! Should be either `Millan22` or `Farinotti19`." - - # Build the OGGM parameters and configuration - OGGM_parameters = OGGMparameters(working_dir, paths, params, - multiprocessing, workers, - ice_thickness_source, DEM_source, - base_url) - - return OGGM_parameters -end - -Base.:(==)(a::OGGMparameters, b::OGGMparameters) = a.working_dir == b.working_dir && a.paths == b.paths && a.params == b.params && - a.multiprocessing == b.multiprocessing && a.workers == b.workers && a.ice_thickness_source == b.ice_thickness_source && - a.DEM_source == b.DEM_source && a.base_url == b.base_url - -""" - oggm_config() - -Configures the basic paths and parameters for OGGM. -""" -function oggm_config(parameters::Parameters) - scope = @__MODULE__ # Capture current module to allow use from external packages (e.g. Huginn, Muninn and ODINN) - @eval begin - @everywhere begin - @eval $scope begin - - cfg[].initialize() # initialize OGGM configuration - - cfg[].PATHS["working_dir"] = $parameters.OGGM.working_dir # Choose own custom path for the OGGM data - cfg[].PARAMS["hydro_month_nh"]=1 - cfg[].PARAMS["dl_verify"] = false - cfg[].PARAMS["continue_on_error"] = true # avoid stopping when a task fails for a glacier (e.g. lack of data) - cfg[].PARAMS["border"] = 10 - # Multiprocessing - # multiprocessing = $oggm_processes > 1 ? 
true : false - cfg[].PARAMS["use_multiprocessing"] = $parameters.OGGM.multiprocessing # Let's use multiprocessing for OGGM - if $parameters.OGGM.multiprocessing - cfg[].PARAMS["mp_processes"] = $parameters.OGGM.workers - end - - - - end # @eval Sleipnir - end # @everywhere - end # @eval - -end \ No newline at end of file diff --git a/src/parameters/Parameters.jl b/src/parameters/Parameters.jl index aa61cd3..54817f0 100644 --- a/src/parameters/Parameters.jl +++ b/src/parameters/Parameters.jl @@ -1,28 +1,26 @@ -export Parameters, AbstractParameters, PhysicalParameters, SimulationParameters, OGGMparameters, AbstractEmptyParams +export Parameters, AbstractParameters, PhysicalParameters, SimulationParameters, AbstractEmptyParams abstract type AbstractParameters end const AbstractEmptyParams = Union{AbstractParameters,Nothing} -mutable struct Parameters{PPHY <: AbstractEmptyParams, PSIM <: AbstractEmptyParams, PHY <: AbstractEmptyParams, - PSOL <: AbstractEmptyParams, PUDE <: AbstractEmptyParams, POGGM <: AbstractEmptyParams, PINV <: AbstractEmptyParams} +mutable struct Parameters{PPHY <: AbstractEmptyParams, PSIM <: AbstractEmptyParams, PHY <: AbstractEmptyParams, + PSOL <: AbstractEmptyParams, PUDE <: AbstractEmptyParams, PINV <: AbstractEmptyParams} physical::PPHY simulation::PSIM - OGGM::POGGM hyper::PHY solver::PSOL UDE::PUDE - inversion::PINV + inversion::PINV end include("PhysicalParameters.jl") include("SimulationParameters.jl") -include("OGGMparameters.jl") """ Parameters(; + physical::PhysicalParameters = PhysicalParameters(), simulation::SimulationParameters = SimulationParameters() - physical::PhysicalParameters = PhysicalParameters() ) Initialize ODINN parameters @@ -32,23 +30,20 @@ Keyword arguments """ function Parameters(; physical::PhysicalParameters = PhysicalParameters(), - simulation::SimulationParameters = SimulationParameters(), - OGGM::OGGMparameters = OGGMparameters(), + simulation::SimulationParameters = SimulationParameters() ) # Build the parameters based on all the subtypes of parameters - parameters = Parameters(physical, simulation, OGGM, - nothing, nothing, nothing, nothing) + parameters = Parameters(physical, simulation, + nothing, nothing, nothing, nothing) if parameters.simulation.multiprocessing enable_multiprocessing(parameters.simulation.workers) end - - oggm_config(parameters) return parameters end Base.:(==)(a::Parameters, b::Parameters) = a.physical == b.physical && a.simulation == b.simulation && - a.OGGM == b.OGGM && a.solver == b.solver && a.hyper == b.hyper && + a.solver == b.solver && a.hyper == b.hyper && a.UDE == b.UDE && a.inversion == b.inversion diff --git a/src/parameters/SimulationParameters.jl b/src/parameters/SimulationParameters.jl index fbadab8..523ddba 100644 --- a/src/parameters/SimulationParameters.jl +++ b/src/parameters/SimulationParameters.jl @@ -4,7 +4,7 @@ struct SimulationParameters{I <: Integer, F <: AbstractFloat} <: AbstractParamet use_MB::Bool use_iceflow::Bool plots::Bool - velocities::Bool + velocities::Bool overwrite_climate::Bool use_glathida_data::Bool float_type::DataType @@ -12,9 +12,11 @@ struct SimulationParameters{I <: Integer, F <: AbstractFloat} <: AbstractParamet tspan::Tuple{F, F} step::F multiprocessing::Bool - workers::I + workers::I working_dir::String test_mode::Bool + rgi_paths::Dict{String, String} + ice_thickness_source::String end @@ -29,8 +31,13 @@ end float_type::DataType = Float64, int_type::DataType = Int64, tspan::Tuple{F, F} = (2010.0,2015.0), + step::F = 1/12, multiprocessing::Bool = true, - 
workers::I = 4 + workers::I = 4, + working_dir::String = "", + test_mode::Bool = false, + rgi_paths::Dict{String, String} = Dict{String, String}(), + ice_thickness_source::String = "Farinotti19", ) Initialize the parameters for a simulation. Keyword arguments @@ -54,23 +61,28 @@ function SimulationParameters(; multiprocessing::Bool = true, workers::I = 4, working_dir::String = "", - test_mode::Bool = false + test_mode::Bool = false, + rgi_paths::Dict{String, String} = Dict{String, String}(), + ice_thickness_source::String = "Farinotti19", ) where {I <: Integer, F <: AbstractFloat} + @assert ((ice_thickness_source == "Millan22") || (ice_thickness_source == "Farinotti19")) "Wrong ice thickness source! Should be either `Millan22` or `Farinotti19`." + simulation_parameters = SimulationParameters(use_MB, use_iceflow, plots, velocities, overwrite_climate, use_glathida_data, float_type, int_type, - tspan, step, multiprocessing, workers, working_dir, test_mode) + tspan, step, multiprocessing, workers, working_dir, test_mode, rgi_paths, ice_thickness_source) if !ispath(working_dir) mkpath(joinpath(working_dir, "data")) - end + end return simulation_parameters end -Base.:(==)(a::SimulationParameters, b::SimulationParameters) = a.use_MB == b.use_MB && a.use_iceflow == b.use_iceflow && a.plots == b.plots && +Base.:(==)(a::SimulationParameters, b::SimulationParameters) = a.use_MB == b.use_MB && a.use_iceflow == b.use_iceflow && a.plots == b.plots && a.velocities == b.velocities && a.overwrite_climate == b.overwrite_climate && a.use_glathida_data == b.use_glathida_data && a.float_type == b.float_type && a.int_type == b.int_type && a.tspan == b.tspan && a.step == b.step && a.multiprocessing == b.multiprocessing && - a.workers == b.workers && a.working_dir == b.working_dir && a.test_mode == b.test_mode + a.workers == b.workers && a.working_dir == b.working_dir && a.test_mode == b.test_mode && a.rgi_paths == b.rgi_paths && + a.ice_thickness_source == b.ice_thickness_source diff --git a/src/setup/config.jl b/src/setup/config.jl index 55e2813..55c58cc 100644 --- a/src/setup/config.jl +++ b/src/setup/config.jl @@ -1,100 +1,22 @@ -export rioxarray, netCDF, cfg, utils, workflow, tasks, global_tasks, graphics, bedtopo, millan22, MBsandbox, salem, pd, xr -# export openssl - -using Libdl: dlopen +export get_rgi_paths function __init__() - # Create structural folders if needed - OGGM_path = joinpath(homedir(), "Python/OGGM_data") - if !isdir(OGGM_path) - mkpath(OGGM_path) - end - - # Avoid issue with dylib files - try - load_libxml() - load_spatialite() - catch e - @error "Failed to load required libraries" exception=(e, catch_backtrace()) - rethrow(e) + # Download preprocessed OGGM data + odinn_path = dirname(prepro_dir) + if !isdir(odinn_path) + mkpath(odinn_path) end - - # Load Python packages - # Only load Python packages if not previously loaded by Sleipnir - #println("Initializing Python libraries...") - isassigned(rioxarray) ? nothing : rioxarray[] = pyimport("rioxarray") - isassigned(netCDF4) ? nothing : netCDF4[] = pyimport("netCDF4") - isassigned(cfg) ? nothing : cfg[] = pyimport("oggm.cfg") - isassigned(utils) ? nothing : utils[] = pyimport("oggm.utils") - isassigned(workflow) ? nothing : workflow[] = pyimport("oggm.workflow") - isassigned(tasks) ? nothing : tasks[] = pyimport("oggm.tasks") - isassigned(global_tasks) ? nothing : global_tasks[] = pyimport("oggm.global_tasks") - isassigned(graphics) ? nothing : graphics[] = pyimport("oggm.graphics") - isassigned(bedtopo) ? 
nothing : bedtopo[] = pyimport("oggm.shop.bedtopo") - isassigned(millan22) ? nothing : millan22[] = pyimport("oggm.shop.millan22") - isassigned(MBsandbox) ? nothing : MBsandbox[] = pyimport("MBsandbox.mbmod_daily_oneflowline") - isassigned(salem) ? nothing : salem[] = pyimport("salem") - isassigned(pd) ? nothing : pd[] = pyimport("pandas") - isassigned(xr) ? nothing : xr[] = pyimport("xarray") -end - -function load_libxml() - lib_dir = joinpath(root_dir, ".CondaPkg/env/lib") - # @show lib_dir - - # Find all libspatialite files in the directory - if Sys.isapple() - lib_files = filter(f -> startswith(f, "libxml") && (endswith(f, ".dylib") || contains(f, ".dylib.")), readdir(lib_dir)) - elseif Sys.islinux() - lib_files = filter(f -> startswith(f, "libxml") && (endswith(f, ".so") || contains(f, ".so.")), readdir(lib_dir)) - else - error("Unsupported operating system") + if !isdir(prepro_dir) + @info "Downloading preprocessed data" + tarGzFile = Downloads.download("https://docs.google.com/uc?export=download&id=1d070a_YqN5aPAONpnzL9hfInv1DA8z3p") + tar_gz = open(tarGzFile) + tar = GzipDecompressorStream(tar_gz) + tempDir = Tar.extract(tar) + close(tar) + mv(joinpath(tempDir, "ODINN_prepro"), prepro_dir) end - if isempty(lib_files) - println("No libxml files found in $lib_dir") - return - end - - for lib_file in lib_files - lib_path = joinpath(lib_dir, lib_file) - try - dlopen(lib_path) - println("Opened $lib_path") - catch e - println("Failed to load $lib_path: $e") - end - end -end - -function load_spatialite() - lib_dir = joinpath(root_dir, ".CondaPkg/env/lib") - # @show lib_dir - - # Find all libspatialite files in the directory - if Sys.isapple() - lib_files = filter(f -> startswith(f, "libspatialite") && (endswith(f, ".dylib") || contains(f, ".dylib.")), readdir(lib_dir)) - elseif Sys.islinux() - lib_files = filter(f -> startswith(f, "libspatialite") && (endswith(f, ".so") || contains(f, ".so.")), readdir(lib_dir)) - else - error("Unsupported operating system") - end - - if isempty(lib_files) - println("No libspatialite files found in $lib_dir") - return - end - - for lib_file in lib_files - lib_path = joinpath(lib_dir, lib_file) - try - dlopen(lib_path) - println("Opened $lib_path") - catch e - println("Failed to load $lib_path: $e") - end - end end function clean() @@ -124,6 +46,17 @@ function clean() return nworkers() end +function filter_existing_paths(paths::Vector{String}) + # Use `filter` to retain only the paths that exist + existing_paths = filter(ispath, paths) + return existing_paths +end + +function get_rgi_paths() + rgi_paths = JSON.parsefile(joinpath(prepro_dir, "rgi_paths.json")) + rgi_paths = Dict(k => string(v) for (k,v) in pairs(rgi_paths)) # Convert Dict{String, Any} to Dict{String, String} + return rgi_paths +end -include("helper_utilities.jl") \ No newline at end of file +include("helper_utilities.jl") diff --git a/src/setup/helper_utilities.jl b/src/setup/helper_utilities.jl index a1c7ca1..0e2d88a 100644 --- a/src/setup/helper_utilities.jl +++ b/src/setup/helper_utilities.jl @@ -10,22 +10,3 @@ function safe_approx(a, b) return a ≈ b end end - -# Function for Python objects -function safe_getproperty(obj::Py, prop_name::Symbol) - if PyCall.hasproperty(obj, prop_name) - return PyCall.getproperty(obj, prop_name) - else - return 0.0 - end -end - -# Function for Julia objects -function safe_getproperty(obj, prop_name::Symbol) - if hasproperty(obj, prop_name) - return getproperty(obj, prop_name) - else - return 0.0 - end -end - diff --git a/test/data/glaciers/glaciers2D.jld2 
b/test/data/glaciers/glaciers2D.jld2 index ee6e4c2..a83e911 100644 Binary files a/test/data/glaciers/glaciers2D.jld2 and b/test/data/glaciers/glaciers2D.jld2 differ diff --git a/test/data/missing_glaciers.jld2 b/test/data/missing_glaciers.jld2 deleted file mode 100644 index f3f1108..0000000 Binary files a/test/data/missing_glaciers.jld2 and /dev/null differ diff --git a/test/data/params/oggm_params_default.jld2 b/test/data/params/oggm_params_default.jld2 deleted file mode 100644 index 5b7a568..0000000 Binary files a/test/data/params/oggm_params_default.jld2 and /dev/null differ diff --git a/test/data/params/oggm_params_specified.jld2 b/test/data/params/oggm_params_specified.jld2 deleted file mode 100644 index e0788df..0000000 Binary files a/test/data/params/oggm_params_specified.jld2 and /dev/null differ diff --git a/test/data/params/params_default.jld2 b/test/data/params/params_default.jld2 index eb91225..88e37a3 100644 Binary files a/test/data/params/params_default.jld2 and b/test/data/params/params_default.jld2 differ diff --git a/test/data/params/params_specified.jld2 b/test/data/params/params_specified.jld2 index ce4dc25..b2fe94a 100644 Binary files a/test/data/params/params_specified.jld2 and b/test/data/params/params_specified.jld2 differ diff --git a/test/data/params/physical_params_default.jld2 b/test/data/params/physical_params_default.jld2 index 7308132..e5e13cc 100644 Binary files a/test/data/params/physical_params_default.jld2 and b/test/data/params/physical_params_default.jld2 differ diff --git a/test/data/params/physical_params_specified.jld2 b/test/data/params/physical_params_specified.jld2 index 7308132..e5e13cc 100644 Binary files a/test/data/params/physical_params_specified.jld2 and b/test/data/params/physical_params_specified.jld2 differ diff --git a/test/data/params/simulation_params_default.jld2 b/test/data/params/simulation_params_default.jld2 index f44196e..3a79606 100644 Binary files a/test/data/params/simulation_params_default.jld2 and b/test/data/params/simulation_params_default.jld2 differ diff --git a/test/data/params/simulation_params_specified.jld2 b/test/data/params/simulation_params_specified.jld2 index 619b9ce..0850e67 100644 Binary files a/test/data/params/simulation_params_specified.jld2 and b/test/data/params/simulation_params_specified.jld2 differ diff --git a/test/glaciers_construction.jl b/test/glaciers_construction.jl index 05376cc..1977eae 100644 --- a/test/glaciers_construction.jl +++ b/test/glaciers_construction.jl @@ -2,30 +2,24 @@ function glaciers2D_constructor(; save_refs::Bool = false) + rgi_paths = get_rgi_paths() rgi_ids = ["RGI60-11.03638", "RGI60-11.01450"] params = Parameters(simulation=SimulationParameters(velocities=false, use_glathida_data=false, working_dir=Sleipnir.root_dir, - test_mode=true), - OGGM=OGGMparameters(ice_thickness_source="Farinotti19")) + test_mode=true, + rgi_paths=rgi_paths)) glaciers = initialize_glaciers(rgi_ids, params; test=true) - # Empty all PyCall stuff to avoid issues - for glacier in glaciers - glacier.gdir = nothing - glacier.climate = nothing - glacier.S_coords = nothing - end - if save_refs jldsave(joinpath(Sleipnir.root_dir, "test/data/glaciers/glaciers2D.jld2"); glaciers) end glaciers_ref = load(joinpath(Sleipnir.root_dir,"test/data/glaciers/glaciers2D.jld2"))["glaciers"] - @test all(glaciers .≈ glaciers_ref) + @test all(glaciers == glaciers_ref) end diff --git a/test/params_construction.jl b/test/params_construction.jl index b69e9fb..22498bc 100644 --- a/test/params_construction.jl +++ 
b/test/params_construction.jl @@ -1,6 +1,8 @@ function params_constructor_specified(; save_refs::Bool = false) + rgi_paths = get_rgi_paths() + physical_params = PhysicalParameters(ρ = 900.0, g = 9.81, ϵ = 1e-3, @@ -22,40 +24,25 @@ function params_constructor_specified(; save_refs::Bool = false) tspan = (2010.0,2015.0), multiprocessing = false, workers = 10, - working_dir = "") - - oggm_params = OGGMparameters(working_dir = "", - paths = nothing, - params = nothing, - multiprocessing = false, - workers = 1, - ice_thickness_source = "Millan22", - DEM_source = "Default", - base_url = "https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L1-L2_files/elev_bands/", - test = true) - + working_dir = "", + rgi_paths = rgi_paths) params = Parameters(physical=physical_params, - simulation=simulation_params, - OGGM=oggm_params) + simulation=simulation_params) if save_refs jldsave(joinpath(Sleipnir.root_dir, "test/data/params/simulation_params_specified.jld2"); simulation_params) jldsave(joinpath(Sleipnir.root_dir, "test/data/params/physical_params_specified.jld2"); physical_params) - jldsave(joinpath(Sleipnir.root_dir, "test/data/params/oggm_params_specified.jld2"); oggm_params) jldsave(joinpath(Sleipnir.root_dir, "test/data/params/params_specified.jld2"); params) end simulation_params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/simulation_params_specified.jld2"))["simulation_params"] physical_params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/physical_params_specified.jld2"))["physical_params"] - oggm_params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/oggm_params_specified.jld2"))["oggm_params"] params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/params_specified.jld2"))["params"] @test physical_params == physical_params_ref @test simulation_params == simulation_params_ref - @test oggm_params == oggm_params_ref @test params == params_ref - end @@ -65,28 +52,22 @@ function params_constructor_default(; save_refs::Bool = false) simulation_params = SimulationParameters() - oggm_params = OGGMparameters(test=true, working_dir="") - params = Parameters(simulation=simulation_params, - physical=physical_params, - OGGM=oggm_params + physical=physical_params ) if save_refs jldsave(joinpath(Sleipnir.root_dir, "test/data/params/simulation_params_default.jld2"); simulation_params) jldsave(joinpath(Sleipnir.root_dir, "test/data/params/physical_params_default.jld2"); physical_params) - jldsave(joinpath(Sleipnir.root_dir, "test/data/params/oggm_params_default.jld2"); oggm_params) jldsave(joinpath(Sleipnir.root_dir, "test/data/params/params_default.jld2"); params) end simulation_params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/simulation_params_default.jld2"))["simulation_params"] physical_params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/physical_params_default.jld2"))["physical_params"] - oggm_params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/oggm_params_default.jld2"))["oggm_params"] params_ref = load(joinpath(Sleipnir.root_dir, "test/data/params/params_default.jld2"))["params"] @test physical_params == physical_params_ref @test simulation_params == simulation_params_ref - @test oggm_params == oggm_params_ref @test params == params_ref end \ No newline at end of file diff --git a/test/runtests.jl b/test/runtests.jl index 78b5472..772e113 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -7,6 +7,8 @@ using Test using JLD2 using Infiltrator using CairoMakie +using JSON +import NCDatasets 
include("params_construction.jl") include("glaciers_construction.jl")