refactored away a cli arg for ppv1 (the date can be determined by looking inside the croco output file)
zachsa committed Apr 25, 2023
1 parent 958cc70 commit 46f3724
Showing 6 changed files with 14 additions and 16 deletions.
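The gist of the change: instead of accepting a --run-date argument, the v1 post-processing step derives the run date from the time axis of the CROCO output itself. A minimal sketch of the idea, assuming the NetCDF time variable holds seconds since a model reference date (the path, reference date, and index here are illustrative, not the project's actual values):

from datetime import datetime, timedelta
import xarray as xr

REFERENCE_DATE = datetime(2000, 1, 1)  # assumed reference time, for illustration only
ds = xr.open_dataset("croco-output.nc")  # hypothetical path
time_steps = [REFERENCE_DATE + timedelta(seconds=float(t)) for t in ds.time.values]
run_date = time_steps[119].strftime("%Y%m%d")  # index mirrors the diff below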
@@ -14,9 +14,9 @@
upsert_model_run,
)

models_file = os.path.abspath("models.json")

current_dir = os.path.dirname(os.path.abspath(__file__))
models_file = os.path.join(current_dir, "models.json")
merge_details_file = os.path.join(current_dir, "upsert_model_info/upsert-details.sql")
merge_coordinates_file = os.path.join(
current_dir, "upsert_model_info/upsert-coordinates.sql"
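The hunk above swaps a working-directory-relative lookup for a module-relative one, so models.json resolves correctly regardless of where the CLI is invoked from. A minimal sketch of the pattern:

import os

# Before: os.path.abspath("models.json") resolved against the process CWD.
# After: resolve against the directory containing this module.
current_dir = os.path.dirname(os.path.abspath(__file__))
models_file = os.path.join(current_dir, "models.json")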
@@ -78,7 +78,12 @@ async def upsert_model_run(pool, id, run_date, ds, input, model, parallelization
# Upsert values
datetimes = ds.time.values
total_depth_levels = ds.sizes["depth"]
await upsert_values(runid, datetimes, total_depth_levels, parallelization)
total_timesteps = ds.sizes["time"]
log("Total depth levels", total_depth_levels)
log("Total timesteps", total_timesteps)
await upsert_values(
runid, datetimes, total_depth_levels, parallelization, total_timesteps
)

# Finalize the run
async with pool.acquire() as conn:
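upsert_model_run now reads the timestep count from the dataset instead of relying on a hard-coded constant. A hedged sketch of how those values come out of an xarray Dataset (the file path is hypothetical):

import xarray as xr

ds = xr.open_dataset("post-process-v1-output.nc")  # hypothetical path
total_depth_levels = ds.sizes["depth"]
total_timesteps = ds.sizes["time"]
datetimes = ds.time.values  # per-timestep time values handed to upsert_values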
@@ -5,8 +5,6 @@
import asyncio
import asyncpg

total_timesteps = 240


async def load_worker(queue, async_pool, runid, datetimes, total_depth_levels):
while True:
@@ -18,7 +16,9 @@ async def load_worker(queue, async_pool, runid, datetimes, total_depth_levels):
queue.task_done()


async def upsert_values(runid, datetimes, total_depth_levels, parallelization):
async def upsert_values(
runid, datetimes, total_depth_levels, parallelization, total_timesteps
):
depth_levels = [*range(int(1), int(total_depth_levels) + 1, 1)]
async_pool = await asyncpg.create_pool(
database=PG_DB,
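With total_timesteps now passed in as a parameter, the hard-coded 240 disappears from this module. For context, a minimal sketch of the queue/worker pattern that upsert_values builds on, with placeholder connection settings and a placeholder query standing in for the real per-timestep upsert:

import asyncio
import asyncpg


async def worker(queue, pool):
    while True:
        timestep = await queue.get()
        try:
            async with pool.acquire() as conn:
                await conn.execute("SELECT $1::int", timestep)  # placeholder for the real upsert
        finally:
            queue.task_done()


async def run(total_timesteps, parallelization):
    # Placeholder settings; the real code reads PG_DB and related values from config
    pool = await asyncpg.create_pool(database="postgres", min_size=1, max_size=parallelization)
    queue = asyncio.Queue()
    for t in range(1, total_timesteps + 1):
        queue.put_nowait(t)
    workers = [asyncio.create_task(worker(queue, pool)) for _ in range(parallelization)]
    await queue.join()
    for w in workers:
        w.cancel()
    await pool.close()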
7 changes: 4 additions & 3 deletions toolkit/cli/applications/croco/post_process_v1/__init__.py
@@ -26,14 +26,12 @@ def post_process_v1(args):
grid = os.path.abspath(args.grid)
input = os.path.abspath(args.input)
output = os.path.abspath(args.output)
run_date = args.run_date

log("Running CROCO output post-processing (v1)")
log("CONFIG::id", id)
log("CONFIG::input", input)
log("CONFIG::grid", grid)
log("CONFIG::output", output)
log("CONFIG::run_date", run_date)

# Ensure the directory for the specified output exists
os.makedirs(os.path.dirname(output), exist_ok=True)
@@ -52,7 +50,10 @@
date_now = REFERENCE_DATE + timedelta(seconds=np.float64(t))
date_round = hour_rounder(date_now)
time_steps.append(date_round)
log("Generated time steps")

# Get the run_date from the CROCO output NetCDF file
run_date = time_steps[119].strftime("%Y%m%d")
log("CONFIG::run_date", run_date)

# Variables used in the visualisations
temperature = data.temp.values
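Note that hour_rounder is not shown in this diff; a hedged sketch of what such a helper typically does (rounding a datetime to the nearest hour), offered as an assumption rather than the project's actual implementation:

from datetime import datetime, timedelta


def hour_rounder(t: datetime) -> datetime:
    # Truncate to the hour, then add an hour if the minutes were past the half-hour mark
    return t.replace(minute=0, second=0, microsecond=0) + timedelta(hours=t.minute // 30)

The hard-coded index 119 presumably points at the timestep corresponding to the run date within the 240-step output (the constant removed above); the diff itself does not spell out that mapping.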
8 changes: 0 additions & 8 deletions toolkit/cli/define/croco/__init__.py
@@ -1,8 +1,3 @@
from datetime import datetime

NOW = datetime.now().strftime("%Y%m%d")


def build(module_parser):
croco = module_parser.add_parser("croco", help="CROCO module")
croco_parser = croco.add_subparsers(
@@ -50,9 +45,6 @@ def build(module_parser):
help="Path of processed output path",
default=".output/croco/post-process-v1-output.nc",
)
croco_post_process_v1.add_argument(
"--run-date", default=NOW, help="Run date (yyyymmdd)"
)

"""
Post-processing V2:
File renamed without changes.
