diff --git a/pyTMD/arguments.py b/pyTMD/arguments.py index 340a90a0..7b0ab07a 100755 --- a/pyTMD/arguments.py +++ b/pyTMD/arguments.py @@ -39,6 +39,7 @@ UPDATE HISTORY: Updated 08/2024: add support for constituents in PERTH5 tables + add back nodal arguments from PERTH3 for backwards compatibility Updated 01/2024: add function to create arguments coefficients table add function to calculate the arguments for minor constituents include multiples of 90 degrees as variable following Ray 2017 @@ -129,7 +130,8 @@ def arguments( kwargs.setdefault('M1', 'perth5') # set function for astronomical longitudes - ASTRO5 = True if kwargs['corrections'] in ('GOT', 'FES') else False + # use ASTRO5 routines if not using an OTIS type model + ASTRO5 = kwargs['corrections'] not in ('OTIS','ATLAS','TMD3','netcdf') # convert from Modified Julian Dates into Ephemeris Time s, h, p, n, pp = pyTMD.astro.mean_longitudes(MJD + kwargs['deltat'], ASTRO5=ASTRO5) @@ -204,7 +206,8 @@ def minor_arguments( # degrees to radians dtr = np.pi/180.0 # set function for astronomical longitudes - ASTRO5 = True if kwargs['corrections'] in ('GOT', 'FES') else False + # use ASTRO5 routines if not using an OTIS type model + ASTRO5 = kwargs['corrections'] not in ('OTIS','ATLAS','TMD3','netcdf') # convert from Modified Julian Dates into Ephemeris Time s, h, p, n, pp = pyTMD.astro.mean_longitudes(MJD + kwargs['deltat'], ASTRO5=ASTRO5) @@ -366,22 +369,22 @@ def coefficients_table( # coefficients['sa'] = [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0] coefficients['ssa'] = [0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0] # With p' - coefficients['sta'] = [0.0, 0.0, 3.0, 0.0, 0.0, -1.0, 0.0] + coefficients['sta'] = [0.0, 0.0, 3.0, 0.0, 0.0, -1.0, 0.0] # # Without p' # coefficients['sta'] = [0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0] coefficients['st'] = [0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0] coefficients['msm'] = [0.0, 1.0, -2.0, 1.0, 0.0, 0.0, 0.0] coefficients['mm'] = [0.0, 1.0, 0.0, -1.0, 0.0, 0.0, 0.0] # annual sideline - coefficients['msfa'] = 
[0.0, 2.0, -3.0, 0.0, 0.0, 0.0, 0.0] + coefficients['msfa'] = [0.0, 2.0, -3.0, 0.0, 0.0, 0.0, 0.0] coefficients['msf'] = [0.0, 2.0, -2.0, 0.0, 0.0, 0.0, 0.0] # annual sideline - coefficients['msfb'] = [0.0, 2.0, -1.0, 0.0, 0.0, 0.0, 0.0] + coefficients['msfb'] = [0.0, 2.0, -1.0, 0.0, 0.0, 0.0, 0.0] coefficients['mfa'] = [0.0, 2.0, -1.0, 0.0, 0.0, 0.0, 0.0] coefficients['mf-'] = [0.0, 2.0, 0.0, 0.0, -1.0, 0.0, 0.0] coefficients['mf'] = [0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0] # nodal line - coefficients['mf+'] = [0.0, 2.0, 0.0, 0.0, 1.0, 0.0, 0.0] + coefficients['mf+'] = [0.0, 2.0, 0.0, 0.0, 1.0, 0.0, 0.0] # nodal line coefficients['mfn'] = [0.0, 2.0, 0.0, 0.0, 1.0, 0.0, 0.0] coefficients['mfb'] = [0.0, 2.0, 1.0, 0.0, 0.0, 0.0, 0.0] @@ -413,12 +416,12 @@ def coefficients_table( coefficients['opk1'] = [1.0, -1.0, -2.0, 0.0, 0.0, 0.0, 1.0] coefficients['oa1'] = [1.0, -1.0, -1.0, 0.0, 0.0, 0.0, -1.0] # O1 nodal line - coefficients['o1n'] = [1.0, -1.0, 0.0, 0.0, -1.0, 0.0, -1.0] + coefficients['o1n'] = [1.0, -1.0, 0.0, 0.0, -1.0, 0.0, -1.0] # O1 nodal line - coefficients['o1-'] = [1.0, -1.0, 0.0, 0.0, -1.0, 0.0, -1.0] + coefficients['o1-'] = [1.0, -1.0, 0.0, 0.0, -1.0, 0.0, -1.0] coefficients['o1'] = [1.0, -1.0, 0.0, 0.0, 0.0, 0.0, -1.0] # conjugate to nodal line - coefficients['o1+'] = [1.0, -1.0, 0.0, 0.0, 1.0, 0.0, -1.0] + coefficients['o1+'] = [1.0, -1.0, 0.0, 0.0, 1.0, 0.0, -1.0] # 3rd degree terms coefficients["o1'"] = [1.0, -1.0, 0.0, 1.0, 0.0, 0.0, 2.0] coefficients['ob1'] = [1.0, -1.0, 1.0, 0.0, 0.0, 0.0, -1.0] @@ -443,9 +446,9 @@ def coefficients_table( coefficients['s1-'] = [1.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0] if kwargs['corrections'] in ('OTIS','ATLAS','TMD3','netcdf'): coefficients['s1'] = [1.0, 1.0, -1.0, 0.0, 0.0, 0.0, 1.0] - elif kwargs['corrections'] in ('GOT', 'FES'): + else: # Doodson's phase - coefficients['s1'] = [1.0, 1.0, -1.0, 0.0, 0.0, 0.0, 2.0] + coefficients['s1'] = [1.0, 1.0, -1.0, 0.0, 0.0, 0.0, 2.0] coefficients['s1+'] = [1.0, 1.0, -1.0, 0.0, 
0.0, 1.0, 1.0] coefficients['ojm1'] = [1.0, 1.0, 0.0, -2.0, 0.0, 0.0, -1.0] # 3rd degree terms @@ -530,11 +533,11 @@ def coefficients_table( coefficients['m2-1'] = [2.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0] coefficients['ma2'] = [2.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0] # M2 nodal line - coefficients['m2-'] = [2.0, 0.0, 0.0, 0.0, -1.0, 0.0, 2.0] + coefficients['m2-'] = [2.0, 0.0, 0.0, 0.0, -1.0, 0.0, 2.0] coefficients['m2'] = [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] coefficients['ko2'] = [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] # conjugate to nodal - coefficients['m2+'] = [2.0, 0.0, 0.0, 0.0, 1.0, 0.0, 2.0] + coefficients['m2+'] = [2.0, 0.0, 0.0, 0.0, 1.0, 0.0, 2.0] # 3rd degree terms coefficients["m2'"] = [2.0, 0.0, 0.0, 1.0, 0.0, 0.0, -1.0] coefficients['mb2'] = [2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0] @@ -580,21 +583,21 @@ def coefficients_table( coefficients['kj2'] = [2.0, 3.0, 0.0, -1.0, 0.0, 0.0, 2.0] coefficients['2kmsn2'] = [2.0, 3.0, 2.0, -1.0, 0.0, 0.0, 0.0] coefficients['2km(sn)2'] = [2.0, 3.0, 2.0, -1.0, 0.0, 0.0, 0.0] - coefficients['2sm2'] = [2.0, 4.0, -4.0, 0.0, 0.0, 0.0, 0.0] - coefficients['2ms2n2'] = [2.0, 4.0, -2.0, -2.0, 0.0, 0.0, 0.0] - coefficients['skm2'] = [2.0, 4.0, -2.0, 0.0, 0.0, 0.0, 0.0] + coefficients['2sm2'] = [2.0, 4.0, -4.0, 0.0, 0.0, 0.0, 0.0] + coefficients['2ms2n2'] = [2.0, 4.0, -2.0, -2.0, 0.0, 0.0, 0.0] + coefficients['skm2'] = [2.0, 4.0, -2.0, 0.0, 0.0, 0.0, 0.0] coefficients['2j2'] = [2.0, 4.0, 0.0, -2.0, 0.0, 0.0, 2.0] coefficients['2k2'] = [2.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0] - coefficients['2snu2'] = [2.0, 5.0, -6.0, 1.0, 0.0, 0.0, 0.0] - coefficients['3(sm)n2'] = [2.0, 5.0, -6.0, 1.0, 0.0, 0.0, 0.0] - coefficients['2sn2'] = [2.0, 5.0, -4.0, -1.0, 0.0, 0.0, 0.0] - coefficients['skn2'] = [2.0, 5.0, -2.0, -1.0, 0.0, 0.0, 0.0] - coefficients['2kn2'] = [2.0, 5.0, 0.0, -1.0, 0.0, 0.0, 0.0] - coefficients['3s2m2'] = [2.0, 6.0, -6.0, 0.0, 0.0, 0.0, 0.0] - coefficients['2sk2m2'] = [2.0, 6.0, -4.0, 0.0, 0.0, 0.0, 0.0] + coefficients['2snu2'] = [2.0, 5.0, 
-6.0, 1.0, 0.0, 0.0, 0.0] + coefficients['3(sm)n2'] = [2.0, 5.0, -6.0, 1.0, 0.0, 0.0, 0.0] + coefficients['2sn2'] = [2.0, 5.0, -4.0, -1.0, 0.0, 0.0, 0.0] + coefficients['skn2'] = [2.0, 5.0, -2.0, -1.0, 0.0, 0.0, 0.0] + coefficients['2kn2'] = [2.0, 5.0, 0.0, -1.0, 0.0, 0.0, 0.0] + coefficients['3s2m2'] = [2.0, 6.0, -6.0, 0.0, 0.0, 0.0, 0.0] + coefficients['2sk2m2'] = [2.0, 6.0, -4.0, 0.0, 0.0, 0.0, 0.0] coefficients['2oq3'] = [3.0, -4.0, 0.0, 1.0, 0.0, 0.0, 1.0] # compound 3 O1 - coefficients['o3'] = [3.0, -3.0, 0.0, 0.0, 0.0, 0.0, 1.0] + coefficients['o3'] = [3.0, -3.0, 0.0, 0.0, 0.0, 0.0, 1.0] coefficients['nq3'] = [3.0, -3.0, 0.0, 2.0, 0.0, 0.0, -1.0] coefficients['muo3'] = [3.0, -3.0, 2.0, 0.0, 0.0, 0.0, -1.0] coefficients['mq3'] = [3.0, -2.0, 0.0, 1.0, 0.0, 0.0, -1.0] @@ -603,7 +606,7 @@ def coefficients_table( coefficients['2op3'] = [3.0, -1.0, -2.0, 0.0, 0.0, 0.0, 1.0] coefficients['2os3'] = [3.0, -1.0, -1.0, 0.0, 0.0, 0.0, 0.0] # Q1+M1+S1 - coefficients['qms3'] = [3.0, -1.0, -1.0, 2.0, 0.0, 0.0, 2.0] + coefficients['qms3'] = [3.0, -1.0, -1.0, 2.0, 0.0, 0.0, 2.0] coefficients['mo3-'] = [3.0, -1.0, 0.0, 0.0, -1.0, 0.0, -1.0] coefficients['mo3'] = [3.0, -1.0, 0.0, 0.0, 0.0, 0.0, -1.0] coefficients['mo3+'] = [3.0, -1.0, 0.0, 0.0, 1.0, 0.0, -1.0] @@ -618,13 +621,13 @@ def coefficients_table( coefficients['ns3'] = [3.0, 0.0, -1.0, 1.0, 0.0, 0.0, 2.0] coefficients['2oj3'] = [3.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0] # 2M2 - M1 - coefficients['2mm3'] = [3.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0] - coefficients['m3'] = [3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + coefficients['2mm3'] = [3.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0] + coefficients['m3'] = [3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] # 3rd degree terms coefficients["m3'"] = [3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0] coefficients['nk3'] = [3.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0] coefficients['mp3'] = [3.0, 1.0, -2.0, 0.0, 0.0, 0.0, -1.0] - coefficients['so3'] = [3.0, 1.0, -2.0, 0.0, 0.0, 0.0, -1.0] + coefficients['so3'] = [3.0, 1.0, -2.0, 0.0, 0.0, 
0.0, -1.0] # 3rd degree terms coefficients['lambda3'] = [3.0, 1.0, -2.0, 1.0, 0.0, 0.0, 0.0] coefficients['ms3'] = [3.0, 1.0, -1.0, 0.0, 0.0, 0.0, 2.0] @@ -657,7 +660,7 @@ def coefficients_table( coefficients['r3'] = [3.0, 3.0, -2.0, 0.0, 0.0, 0.0, 2.0] coefficients['sk3'] = [3.0, 3.0, -2.0, 0.0, 0.0, 0.0, 1.0] # s3 perturbation - coefficients['2r3'] = [3.0, 3.0, -1.0, 0.0, 0.0, 0.0, 2.0] + coefficients['2r3'] = [3.0, 3.0, -1.0, 0.0, 0.0, 0.0, 2.0] coefficients['k3'] = [3.0, 3.0, 0.0, 0.0, 0.0, 0.0, -1.0] coefficients['2so3'] = [3.0, 5.0, -4.0, 0.0, 0.0, 0.0, 1.0] coefficients['2jp3'] = [3.0, 5.0, -2.0, -2.0, 0.0, 0.0, 1.0] @@ -667,11 +670,11 @@ def coefficients_table( coefficients['2sq3'] = [3.0, 6.0, -4.0, -1.0, 0.0, 0.0, 1.0] coefficients['o4'] = [4.0, -4.0, 0.0, 0.0, 0.0, 0.0, 0.0] coefficients['2qm4'] = [4.0, -4.0, 0.0, 2.0, 0.0, 0.0, 2.0] - coefficients['4ms4'] = [4.0, -4.0, 4.0, 0.0, 0.0, 0.0, 0.0] - coefficients['4m2s4'] = [4.0, -4.0, 4.0, 0.0, 0.0, 0.0, 0.0] - coefficients['2mnk4'] = [4.0, -3.0, 0.0, 1.0, 0.0, 0.0, 0.0] - coefficients['moq4'] = [4.0, -3.0, 0.0, 1.0, 0.0, 0.0, 2.0] - coefficients['2mns4'] = [4.0, -3.0, 2.0, 1.0, 0.0, 0.0, 0.0] + coefficients['4ms4'] = [4.0, -4.0, 4.0, 0.0, 0.0, 0.0, 0.0] + coefficients['4m2s4'] = [4.0, -4.0, 4.0, 0.0, 0.0, 0.0, 0.0] + coefficients['2mnk4'] = [4.0, -3.0, 0.0, 1.0, 0.0, 0.0, 0.0] + coefficients['moq4'] = [4.0, -3.0, 0.0, 1.0, 0.0, 0.0, 2.0] + coefficients['2mns4'] = [4.0, -3.0, 2.0, 1.0, 0.0, 0.0, 0.0] coefficients['2mns4'] = [4.0, -3.0, 4.0, -1.0, 0.0, 0.0, 0.0] coefficients['2mnus4'] = [4.0, -3.0, 4.0, -1.0, 0.0, 0.0, 0.0] coefficients['3mk4'] = [4.0, -2.0, 0.0, 0.0, 0.0, 0.0, 0.0] @@ -734,7 +737,7 @@ def coefficients_table( coefficients['2ms5'] = [5.0, 1.0, -1.0, 0.0, 0.0, 0.0, 2.0] coefficients['2mk5'] = [5.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0] # N2 + K2 + S1 - coefficients['nks5'] = [5.0, 2.0, -1.0, 1.0, 0.0, 0.0, 2.0] + coefficients['nks5'] = [5.0, 2.0, -1.0, 1.0, 0.0, 0.0, 2.0] coefficients['nsk5'] = [5.0, 
2.0, -2.0, -1.0, 0.0, 0.0, 1.0] coefficients['msm5'] = [5.0, 2.0, -2.0, 0.0, 0.0, 0.0, 2.0] coefficients['snk5'] = [5.0, 2.0, -2.0, 1.0, 0.0, 0.0, -1.0] @@ -843,14 +846,14 @@ def coefficients_table( coefficients['ma8'] = [8.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0] coefficients['m8'] = [8.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] coefficients['mb8'] = [8.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0] - coefficients['2msn8'] = [8.0, 1.0, -2.0, 1.0, 0.0, 0.0, 0.0] + coefficients['2msn8'] = [8.0, 1.0, -2.0, 1.0, 0.0, 0.0, 0.0] coefficients['3ml8'] = [8.0, 1.0, 0.0, -1.0, 0.0, 0.0, 0.0] coefficients['2mnk8'] = [8.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0] coefficients['3mt8'] = [8.0, 2.0, -3.0, 0.0, 0.0, 1.0, 0.0] coefficients['3ms8'] = [8.0, 2.0, -2.0, 0.0, 0.0, 0.0, 0.0] coefficients['3mk8'] = [8.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0] - coefficients['2smn8'] = [8.0, 3.0, -4.0, 1.0, 0.0, 0.0, 0.0] - coefficients['2msl8'] = [8.0, 3.0, -2.0, -1.0, 0.0, 0.0, 2.0] + coefficients['2smn8'] = [8.0, 3.0, -4.0, 1.0, 0.0, 0.0, 0.0] + coefficients['2msl8'] = [8.0, 3.0, -2.0, -1.0, 0.0, 0.0, 2.0] coefficients['msnk8'] = [8.0, 3.0, -2.0, 1.0, 0.0, 0.0, 0.0] coefficients['4msn8'] = [8.0, 3.0, 0.0, -1.0, 0.0, 0.0, 0.0] coefficients['2(ms)8'] = [8.0, 4.0, -4.0, 0.0, 0.0, 0.0, 0.0] @@ -879,7 +882,7 @@ def coefficients_table( coefficients['s10'] = [10.0, 10.0, -10.0, 0.0, 0.0, 0.0, 0.0] coefficients['4msk11'] = [11.0, 3.0, -2, 0.0, 0.0, 0.0, 1.0] coefficients['s11'] = [11.0, 11.0, -11.0, 0.0, 0.0, 0.0, 0.0] - coefficients['5mn12'] = [12.0, -1, 0.0, 1.0, 0.0, 0.0, 0.0] + coefficients['5mn12'] = [12.0, -1, 0.0, 1.0, 0.0, 0.0, 0.0] coefficients['m12'] = [12.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] coefficients['4msn12'] = [12.0, 1.0, -2, 1.0, 0.0, 0.0, 0.0] coefficients['4mns12'] = [12.0, 1.0, -2, 1.0, 0.0, 0.0, 0.0] @@ -1051,6 +1054,7 @@ def nodal( # set correction type OTIS_TYPE = kwargs['corrections'] in ('OTIS','ATLAS','TMD3','netcdf') FES_TYPE = kwargs['corrections'] in ('FES',) + PERTH3_TYPE = kwargs['corrections'] in ('perth3',) # 
degrees to radians dtr = np.pi/180.0 @@ -1096,12 +1100,12 @@ def nodal( Q_sec = (np.sin(II)**2)*np.cos(2.0*nu) + 0.0727 nu_sec = 0.5*np.arctan(P_sec/Q_sec) - # compute standard nodal corrections f and u + # compute standard nodal corrections f and u for i, c in enumerate(constituents): if c in ('msf','tau1','p1','theta1','lambda2','s2') and OTIS_TYPE: term1 = 0.0 term2 = 1.0 - elif c in ('p1','s2') and FES_TYPE: + elif c in ('p1','s2') and (FES_TYPE or PERTH3_TYPE): term1 = 0.0 term2 = 1.0 elif c in ('mm','msm') and OTIS_TYPE: @@ -1148,25 +1152,29 @@ def nodal( u[:,i] = dtr*(10.8*sinn - 1.3*sin2n + 0.2*sin3n) continue elif c in ('o1','so3','op2','2q1','q1','rho1','sigma1') and FES_TYPE: - f[:,i] = np.sin(II)*(np.cos(II/2.0)**2)/0.38 + f[:,i] = np.sin(II)*(np.cos(II/2.0)**2)/0.38 u[:,i] = (2.0*xi - nu) continue + elif c in ('q1','o1') and PERTH3_TYPE: + f[:,i] = 1.009 + 0.187*cosn - 0.015*cos2n + u[:,i] = dtr*(10.8*sinn - 1.3*sin2n) + continue elif c in ('o1','so3','op2'): term1 = 0.1886*sinn - 0.0058*sin2n - 0.0065*sin2p term2 = 1.0 + 0.1886*cosn - 0.0058*cos2n - 0.0065*cos2p elif c in ('2q1','q1','rho1','sigma1') and OTIS_TYPE: f[:,i] = np.sqrt((1.0 + 0.188*cosn)**2 + (0.188*sinn)**2) u[:,i] = np.arctan(0.189*sinn/(1.0 + 0.189*cosn)) - continue + continue elif c in ('2q1','q1','rho1','sigma1'): - term1 = 0.1886*sinn + term1 = 0.1886*sinn term2 = 1.0 + 0.1886*cosn elif c in ('tau1',): - term1 = 0.219*sinn - term2 = 1.0 - 0.219*cosn + term1 = 0.219*sinn + term2 = 1.0 - 0.219*cosn elif c in ('beta1',): - term1 = 0.226*sinn - term2 = 1.0 + 0.226*cosn + term1 = 0.226*sinn + term2 = 1.0 + 0.226*cosn elif c in ('m1',) and (kwargs['M1'] == 'Doodson'): # A. T. 
Doodson's coefficients for M1 tides term1 = sinp + 0.2*np.sin((p-n)*dtr) @@ -1187,11 +1195,11 @@ def nodal( u[:,i] = -nu continue elif c in ('chi1',): - term1 = -0.250*sinn - term2 = 1.0 + 0.193*cosn + term1 = -0.250*sinn + term2 = 1.0 + 0.193*cosn elif c in ('p1',): - term1 = -0.0112*sinn - term2 = 1.0 - 0.0112*cosn + term1 = -0.0112*sinn + term2 = 1.0 - 0.0112*cosn elif c in ('k1','sk3','2sk5') and OTIS_TYPE: term1 = -0.1554*sinn + 0.0029*sin2n term2 = 1.0 + 0.1158*cosn - 0.0029*cos2n @@ -1201,6 +1209,10 @@ def nodal( f[:,i] = np.sqrt(temp1 + temp2 + 0.1006) u[:,i] = -nu_prime continue + elif c in ('k1',) and PERTH3_TYPE: + f[:,i] = 1.006 + 0.115*cosn - 0.009*cos2n + u[:,i] = dtr*(-8.9*sinn + 0.7*sin2n) + continue elif c in ('k1','sk3','2sk5'): term1 = -0.1554*sinn + 0.0031*sin2n term2 = 1.0 + 0.1158*cosn - 0.0028*cos2n @@ -1222,6 +1234,10 @@ def nodal( f[:,i] = np.power(np.cos(II/2.0),4.0)/0.9154 u[:,i] = 2.0*xi - 2.0*nu continue + elif c in ('m2','n2') and PERTH3_TYPE: + f[:,i] = 1.000 - 0.037*cosn + u[:,i] = dtr*(-2.1*sinn) + continue elif c in ('m2','2n2','mu2','n2','nu2','lambda2','ms4','eps2','2sm6', '2sn6','mp1','mp3','sn4'): term1 = -0.03731*sinn + 0.00052*sin2n @@ -1249,6 +1265,10 @@ def nodal( f[:,i] = np.sqrt(term1 + term2 + 0.0981) u[:,i] = -2.0*nu_sec continue + elif c in ('k2',) and PERTH3_TYPE: + f[:,i] = 1.024 + 0.286*cosn + 0.008*cos2n + u[:,i] = dtr*(-17.7*sinn + 0.7*sin2n) + continue elif c in ('k2','sk4','2sk6','kp1'): term1 = -0.3108*sinn - 0.0324*sin2n term2 = 1.0 + 0.2853*cosn + 0.0324*cos2n @@ -1272,11 +1292,11 @@ def nodal( # Linear 3rd degree terms term1 = -0.01815*sinn term2 = 1.0 - 0.27837*cosn - elif c in ("q1'",): + elif c in ("q1'",): # Linear 3rd degree terms term1 = 0.3915*sinn + 0.033*sin2n + 0.061*sin2p term2 = 1.0 + 0.3915*cosn + 0.033*cos2n + 0.06*cos2p - elif c in ("j1'",): + elif c in ("j1'",): # Linear 3rd degree terms term1 = -0.438*sinn - 0.033*sin2n term2 = 1.0 + 0.372*cosn + 0.033*cos2n diff --git a/pyTMD/compute.py 
b/pyTMD/compute.py index 162cf434..e1018471 100644 --- a/pyTMD/compute.py +++ b/pyTMD/compute.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute.py -Written by Tyler Sutterley (07/2024) +Written by Tyler Sutterley (08/2024) Calculates tidal elevations for correcting elevation or imagery data Calculates tidal currents at locations and times @@ -60,6 +60,7 @@ interpolate.py: interpolation routines for spatial data UPDATE HISTORY: + Updated 08/2024: allow inferring only specific minor constituents Updated 07/2024: assert that data type is a known value make number of days to convert JD to MJD a variable added option to crop tide models to the domain of the input data @@ -191,7 +192,7 @@ def tide_elevations( ATLAS_FORMAT: str = 'ATLAS-netcdf', GZIP: bool = False, DEFINITION_FILE: str | pathlib.Path | IOBase | None = None, - DEFINITION_FORMAT: str = 'ascii', + DEFINITION_FORMAT: str = 'auto', CROP: bool = False, BOUNDS: list | np.ndarray | None = None, EPSG: str | int = 3031, @@ -202,6 +203,7 @@ def tide_elevations( EXTRAPOLATE: bool = False, CUTOFF: int | float=10.0, INFER_MINOR: bool = True, + MINOR_CONSTITUENTS: list | None = None, APPLY_FLEXURE: bool = False, FILL_VALUE: float = np.nan, **kwargs @@ -228,11 +230,12 @@ def tide_elevations( Tide model files are gzip compressed DEFINITION_FILE: str, pathlib.Path, io.IOBase or NoneType, default None Tide model definition file for use - DEFINITION_FORMAT: str, default 'ascii' + DEFINITION_FORMAT: str, default 'auto' Format for model definition file - ``'ascii'``: tab-delimited definition file - ``'json'``: JSON formatted definition file + - ``'auto'``: auto-detect the definition file format CROP: bool, default False Crop tide model data to (buffered) bounds BOUNDS: list, np.ndarray or NoneType, default None @@ -271,6 +274,8 @@ def tide_elevations( Set to ``np.inf`` to extrapolate for all points INFER_MINOR: bool, default True Infer the height values for minor tidal constituents + MINOR_CONSTITUENTS: list or None, 
default None + Specify constituents to infer APPLY_FLEXURE: bool, default False Apply ice flexure scaling factor to height values @@ -346,7 +351,7 @@ def tide_elevations( deltat = np.zeros((nt), dtype=np.float64) elif model.format in ('ATLAS-netcdf',): amp,ph,D,c = pyTMD.io.ATLAS.extract_constants(lon, lat, model.grid_file, - model.model_file, type=model.type, crop=CROP, bounds=BOUNDS, + model.model_file, type=model.type, crop=CROP, bounds=BOUNDS, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) # use delta time at 2000.0 to match TMD outputs @@ -374,6 +379,7 @@ def tide_elevations( hc = amp*np.exp(cph) # predict tidal elevations at time + minor_constituents = MINOR_CONSTITUENTS or model.minor if (TYPE.lower() == 'grid'): ny,nx = np.shape(x) tide = np.ma.zeros((ny,nx,nt),fill_value=FILL_VALUE) @@ -384,7 +390,8 @@ def tide_elevations( # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide[i], hc, c, - deltat=deltat[i], corrections=corrections) + deltat=deltat[i], corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components and reform grid @@ -398,7 +405,8 @@ def tide_elevations( # calculate values for minor constituents by inferrence if INFER_MINOR: minor = pyTMD.predict.infer_minor(ts.tide, hc, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) tide.data[:] += minor.data[:] elif (TYPE.lower() == 'time series'): nstation = len(x) @@ -411,7 +419,8 @@ def tide_elevations( # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide, HC, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components @@ -442,6 +451,7 @@ def tide_currents( EXTRAPOLATE: bool = 
False, CUTOFF: int | float=10.0, INFER_MINOR: bool = True, + MINOR_CONSTITUENTS: list | None = None, FILL_VALUE: float = np.nan, **kwargs ): @@ -467,11 +477,12 @@ def tide_currents( Tide model files are gzip compressed DEFINITION_FILE: str, pathlib.Path, io.IOBase or NoneType, default None Tide model definition file for use - DEFINITION_FORMAT: str, default 'ascii' + DEFINITION_FORMAT: str, default 'auto' Format for model definition file - ``'ascii'``: tab-delimited definition file - ``'json'``: JSON formatted definition file + - ``'auto'``: auto-detect the definition file format CROP: bool, default False Crop tide model data to (buffered) bounds BOUNDS: list, np.ndarray or NoneType, default None @@ -510,6 +521,8 @@ def tide_currents( Set to ``np.inf`` to extrapolate for all points INFER_MINOR: bool, default True Infer the height values for minor tidal constituents + MINOR_CONSTITUENTS: list or None, default None + Specify constituents to infer FILL_VALUE: float, default np.nan Output invalid value @@ -585,14 +598,14 @@ def tide_currents( deltat = np.zeros((nt), dtype=np.float64) elif model.format in ('ATLAS-netcdf',): amp,ph,D,c = pyTMD.io.ATLAS.extract_constants(lon, lat, model.grid_file, - model.model_file[t], type=t, crop=CROP, bounds=BOUNDS, + model.model_file[t], type=t, crop=CROP, bounds=BOUNDS, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) # use delta time at 2000.0 to match TMD outputs deltat = np.zeros((nt), dtype=np.float64) elif model.format in ('FES-ascii', 'FES-netcdf'): amp,ph = pyTMD.io.FES.extract_constants(lon, lat, model.model_file[t], - type=t, version=model.version, crop=CROP, bounds=BOUNDS, + type=t, version=model.version, crop=CROP, bounds=BOUNDS, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) # available model constituents @@ -606,6 +619,7 @@ def tide_currents( hc = amp*np.exp(cph) # predict tidal currents at time + 
minor_constituents = MINOR_CONSTITUENTS or model.minor if (TYPE.lower() == 'grid'): ny,nx = np.shape(x) tide[t] = np.ma.zeros((ny,nx,nt),fill_value=FILL_VALUE) @@ -616,7 +630,8 @@ def tide_currents( # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide[i], hc, c, - deltat=deltat[i], corrections=corrections) + deltat=deltat[i], corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components and reform grid @@ -630,7 +645,8 @@ def tide_currents( # calculate values for minor constituents by inferrence if INFER_MINOR: minor = pyTMD.predict.infer_minor(ts.tide, hc, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) tide[t].data[:] += minor.data[:] elif (TYPE.lower() == 'time series'): nstation = len(x) @@ -643,7 +659,8 @@ def tide_currents( # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide, HC, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components diff --git a/pyTMD/io/constituents.py b/pyTMD/io/constituents.py index c2f009e1..c6371d4c 100644 --- a/pyTMD/io/constituents.py +++ b/pyTMD/io/constituents.py @@ -260,7 +260,7 @@ def __next__(self): constituent = getattr(self, field) self.__index__ += 1 return (field, constituent) - + def __getitem__(self, key): return getattr(self, key) diff --git a/pyTMD/io/model.py b/pyTMD/io/model.py index c5389da3..1c095c64 100644 --- a/pyTMD/io/model.py +++ b/pyTMD/io/model.py @@ -1,11 +1,14 @@ #!/usr/bin/env python u""" model.py -Written by Tyler Sutterley (07/2024) +Written by Tyler Sutterley (08/2024) Retrieves tide model parameters for named tide models and from model definition files UPDATE HISTORY: + Updated 08/2024: added attribute for minor constituents to 
infer + allow searching over iterable glob strings in definition files + added option to try automatic detection of definition file format Updated 07/2024: added new FES2022 and FES2022_load to list of models added JSON format for model definition files use parse function from constituents class to extract names @@ -53,6 +56,7 @@ import json import pathlib import pyTMD.io.constituents +from collections.abc import Iterable class model: """Retrieves tide model parameters for named models or @@ -75,8 +79,10 @@ class model: HDF5 dataset string for output ATL12 tide heights compressed: bool Model files are gzip compressed - constituents: list + constituents: list or None Model constituents for ``FES`` models + minor: list or None + Minor constituents for inference description: str HDF5 ``description`` attribute string for output tide heights directory: str, pathlib.Path or None, default None @@ -133,6 +139,7 @@ def __init__(self, directory: str | pathlib.Path | None = None, **kwargs): # set initial attributes self.compressed = copy.copy(kwargs['compressed']) self.constituents = None + self.minor = None # set working data directory self.directory = None if directory is not None: @@ -514,6 +521,10 @@ def elevation(self, m: str): self.model_file = self.pathfinder(model_files) self.scale = 1.0/100.0 self.version = '4.7' + # list of minor constituents for inference + self.minor = ['2q1', 'sigma1', 'rho1', 'm1b', 'm1', 'chi1', 'pi1', + 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', 'nu2', 'lambda2', + 'l2', 'l2b', 't2'] # model description and references self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.variable = 'tide_ocean' @@ -526,6 +537,10 @@ def elevation(self, m: str): self.model_file = self.pathfinder(model_files) self.scale = 1.0/1000.0 self.version = '4.7' + # list of minor constituents for inference + self.minor = ['2q1', 'sigma1', 'rho1', 'm1b', 'm1', 'chi1', 'pi1', + 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', 'nu2', 'lambda2', + 'l2', 'l2b', 
't2'] # model description and references self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.variable = 'tide_load' @@ -537,6 +552,10 @@ def elevation(self, m: str): self.model_file = self.pathfinder(model_files) self.scale = 1.0/100.0 self.version = '4.8' + # list of minor constituents for inference + self.minor = ['2q1', 'sigma1', 'rho1', 'm1b', 'm1', 'chi1', 'pi1', + 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', 'nu2', 'lambda2', + 'l2', 'l2b', 't2'] # model description and references self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.variable = 'tide_ocean' @@ -549,6 +568,10 @@ def elevation(self, m: str): self.model_file = self.pathfinder(model_files) self.scale = 1.0/1000.0 self.version = '4.8' + # list of minor constituents for inference + self.minor = ['2q1', 'sigma1', 'rho1', 'm1b', 'm1', 'chi1', 'pi1', + 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', 'nu2', 'lambda2', + 'l2', 'l2b', 't2'] # model description and references self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.variable = 'tide_load' @@ -560,6 +583,10 @@ def elevation(self, m: str): self.model_file = self.pathfinder(model_files) self.scale = 1.0/100.0 self.version = '4.10' + # list of minor constituents for inference + self.minor = ['2q1', 'sigma1', 'rho1', 'm1b', 'm1', 'chi1', 'pi1', + 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', 'nu2', 'lambda2', + 'l2', 'l2b', 't2'] # model description and references self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.variable = 'tide_ocean' @@ -572,6 +599,10 @@ def elevation(self, m: str): self.model_file = self.pathfinder(model_files) self.scale = 1.0/1000.0 self.version = '4.10' + # list of minor constituents for inference + self.minor = ['2q1', 'sigma1', 'rho1', 'm1b', 'm1', 'chi1', 'pi1', + 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', 'nu2', 'lambda2', + 'l2', 'l2b', 't2'] # model description and references self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.variable = 
'tide_load' @@ -1380,7 +1411,7 @@ def pathfinder(self, model_file: str | pathlib.Path | list): def from_file(self, definition_file: str | pathlib.Path | io.IOBase, - format: str = 'ascii' + format: str = 'auto' ): """ Create a model object from an input definition file @@ -1389,11 +1420,12 @@ def from_file(self, ---------- definition_file: str, pathlib.Path or io.IOBase model definition file for creating model object - format: str + format: str, default 'auto' format of the input definition file - ``'ascii'``: tab-delimited definition file - ``'json'``: JSON formatted definition file + - ``'auto'``: attempt to auto-detect format """ # Opening definition file and assigning file ID number if isinstance(definition_file, io.IOBase): @@ -1406,11 +1438,33 @@ def from_file(self, self.from_ascii(fid) elif (format.lower() == 'json'): self.from_json(fid) + elif (format.lower() == 'auto'): + self.from_auto(fid) # close the definition file fid.close() # return the model object return self + def from_auto(self, fid: io.IOBase): + # attempt to read from JSON + try: + self.from_json(fid) + except json.decoder.JSONDecodeError as exc: + pass + else: + return self + # rewind the definition file + fid.seek(0) + # attempt to read from ascii + try: + self.from_ascii(fid) + except Exception as exc: + pass + else: + return self + # raise an exception + raise IOError('Cannot load definition file') + def from_ascii(self, fid: io.IOBase): """ Load and parse tab-delimited definition file @@ -1469,8 +1523,11 @@ def from_ascii(self, fid: io.IOBase): elif (temp.type == ['u','v']) and (temp.directory is not None): # use glob strings to find files in directory glob_string = copy.copy(temp.model_file) - model_file = list(temp.directory.glob(glob_string)) - # copy to model file and directory dictionaries + model_file = [] + # search singular glob string or iterable glob strings + for p in re.split(r'[\s\,]+', temp.model_file): + model_file.extend(temp.directory.glob(p)) + # update model file 
temp.model_file = dict(u=model_file, v=model_file) # attempt to extract model directory try: @@ -1481,7 +1538,10 @@ def from_ascii(self, fid: io.IOBase): elif (temp.type == 'z') and (temp.directory is not None): # use glob strings to find files in directory glob_string = copy.copy(temp.model_file) - temp.model_file = list(temp.directory.glob(glob_string)) + temp.model_file = [] + # search singular glob string or iterable glob strings + for p in re.split(r'[\s\,]+', glob_string): + temp.model_file.extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file[0].parent @@ -1502,12 +1562,16 @@ def from_ascii(self, fid: io.IOBase): # extract model files if (temp.type == ['u','v']) and (temp.directory is not None): # split model file string at semicolon - model_file = temp.model_file.split(';') - glob_string = dict(u=model_file[0], v=model_file[1]) + glob_string = temp.model_file.split(';') # use glob strings to find files in directory temp.model_file = {} - temp.model_file['u'] = list(temp.directory.glob(glob_string['u'])) - temp.model_file['v'] = list(temp.directory.glob(glob_string['v'])) + temp.model_file['u'] = [] + temp.model_file['v'] = [] + # search singular glob string or iterable glob strings + for p in re.split(r'[\s\,]+', glob_string[0]): + temp.model_file['u'].extend(temp.directory.glob(p)) + for p in re.split(r'[\s\,]+', glob_string[1]): + temp.model_file['v'].extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file['u'][0].parent @@ -1517,7 +1581,10 @@ def from_ascii(self, fid: io.IOBase): elif (temp.type == 'z') and (temp.directory is not None): # use glob strings to find files in directory glob_string = copy.copy(temp.model_file) - temp.model_file = list(temp.directory.glob(glob_string)) + temp.model_file = [] + # search singular glob string or iterable glob strings + for p in re.split(r'[\s\,]+', glob_string): + 
temp.model_file.extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file[0].parent @@ -1543,12 +1610,16 @@ def from_ascii(self, fid: io.IOBase): # extract model files if (temp.type == ['u','v']) and (temp.directory is not None): # split model file string at semicolon - model_file = temp.model_file.split(';') - glob_string = dict(u=model_file[0], v=model_file[1]) + glob_string = temp.model_file.split(';') # use glob strings to find files in directory temp.model_file = {} - temp.model_file['u'] = list(temp.directory.glob(glob_string['u'])) - temp.model_file['v'] = list(temp.directory.glob(glob_string['v'])) + temp.model_file['u'] = [] + temp.model_file['v'] = [] + # search singular glob string or iterable glob strings + for p in re.split(r'[\s\,]+', glob_string[0]): + temp.model_file['u'].extend(temp.directory.glob(p)) + for p in re.split(r'[\s\,]+', glob_string[1]): + temp.model_file['v'].extend(temp.directory.glob(p)) # build model directory dictionaries temp.model_directory = {} for key, val in temp.model_file.items(): @@ -1561,7 +1632,11 @@ def from_ascii(self, fid: io.IOBase): elif (temp.type == 'z') and (temp.directory is not None): # use glob strings to find files in directory glob_string = copy.copy(temp.model_file) - temp.model_file = list(temp.directory.glob(glob_string)) + temp.model_file = [] + # search singular glob string or iterable glob strings + for p in re.split(r'[\s\,]+', glob_string): + temp.model_file.extend(temp.directory.glob(p)) + # build model directory temp.model_directory = temp.model_file[0].parent # attempt to extract model directory try: @@ -1638,7 +1713,15 @@ def from_json(self, fid: io.IOBase): if (temp.type == ['u','v']) and (temp.directory is not None): # use glob strings to find files in directory for key, glob_string in temp.model_file.items(): - temp.model_file[key] = list(temp.directory.glob(glob_string)) + # search singular glob string or iterable glob strings + if 
isinstance(glob_string, str): + # singular glob string + temp.model_file[key] = list(temp.directory.glob(glob_string)) + elif isinstance(glob_string, Iterable): + # iterable glob strings + temp.model_file[key] = [] + for p in glob_string: + temp.model_file[key].extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file['u'][0].parent @@ -1648,8 +1731,15 @@ def from_json(self, fid: io.IOBase): elif (temp.type == 'z') and (temp.directory is not None): # use glob strings to find files in directory glob_string = copy.copy(temp.model_file) - - temp.model_file = list(temp.directory.glob(glob_string)) + # search singular glob string or iterable glob strings + if isinstance(glob_string, str): + # singular glob string + temp.model_file = list(temp.directory.glob(glob_string)) + elif isinstance(glob_string, Iterable): + # iterable glob strings + temp.model_file = [] + for p in glob_string: + temp.model_file.extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file[0].parent @@ -1659,7 +1749,7 @@ def from_json(self, fid: io.IOBase): elif (temp.type == ['u','v']) and isinstance(temp.model_file, dict): # resolve paths to model files for each direction for key, model_file in temp.model_file.items(): - temp.model_file[key] = [pathlib.Path(f).expanduser() for f in + temp.model_file[key] = [pathlib.Path(f).expanduser() for f in model_file] # copy directory dictionaries temp.model_directory = temp.model_file['u'][0].parent @@ -1683,7 +1773,15 @@ def from_json(self, fid: io.IOBase): if (temp.type == ['u','v']) and (temp.directory is not None): # use glob strings to find files in directory for key, glob_string in temp.model_file.items(): - temp.model_file[key] = list(temp.directory.glob(glob_string)) + # search singular glob string or iterable glob strings + if isinstance(glob_string, str): + # singular glob string + temp.model_file[key] = list(temp.directory.glob(glob_string)) + 
elif isinstance(glob_string, Iterable): + # iterable glob strings + temp.model_file[key] = [] + for p in glob_string: + temp.model_file[key].extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file['u'][0].parent @@ -1693,7 +1791,15 @@ def from_json(self, fid: io.IOBase): elif (temp.type == 'z') and (temp.directory is not None): # use glob strings to find files in directory glob_string = copy.copy(temp.model_file) - temp.model_file = list(temp.directory.glob(glob_string)) + # search singular glob string or iterable glob strings + if isinstance(glob_string, str): + # singular glob string + temp.model_file = list(temp.directory.glob(glob_string)) + elif isinstance(glob_string, Iterable): + # iterable glob strings + temp.model_file = [] + for p in glob_string: + temp.model_file.extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file[0].parent @@ -1703,7 +1809,7 @@ def from_json(self, fid: io.IOBase): elif (temp.type == ['u','v']): # resolve paths to model files for each direction for key, model_file in temp.model_file.items(): - temp.model_file[key] = [pathlib.Path(f).expanduser() for f in + temp.model_file[key] = [pathlib.Path(f).expanduser() for f in model_file] # copy to directory dictionaries temp.model_directory = temp.model_file['u'][0].parent @@ -1717,7 +1823,15 @@ def from_json(self, fid: io.IOBase): if (temp.type == ['u','v']) and (temp.directory is not None): # use glob strings to find files in directory for key, glob_string in temp.model_file.items(): - temp.model_file[key] = list(temp.directory.glob(glob_string)) + # search singular glob string or iterable glob strings + if isinstance(glob_string, str): + # singular glob string + temp.model_file[key] = list(temp.directory.glob(glob_string)) + elif isinstance(glob_string, Iterable): + # iterable glob strings + temp.model_file[key] = [] + for p in glob_string: + 
temp.model_file[key].extend(temp.directory.glob(p)) # build model directory dictionaries temp.model_directory = {} for key, val in temp.model_file.items(): @@ -1730,8 +1844,15 @@ def from_json(self, fid: io.IOBase): elif (temp.type == 'z') and (temp.directory is not None): # use glob strings to find files in directory glob_string = copy.copy(temp.model_file) - - temp.model_file = list(temp.directory.glob(glob_string)) + # search singular glob string or iterable glob strings + if isinstance(glob_string, str): + # singular glob string + temp.model_file = list(temp.directory.glob(glob_string)) + elif isinstance(glob_string, Iterable): + # iterable glob strings + temp.model_file = [] + for p in glob_string: + temp.model_file.extend(temp.directory.glob(p)) # attempt to extract model directory try: temp.model_directory = temp.model_file[0].parent @@ -1740,7 +1861,7 @@ def from_json(self, fid: io.IOBase): elif (temp.type == ['u','v']): # resolve paths to model files for each direction for key, model_file in temp.model_file.items(): - temp.model_file[key] = [pathlib.Path(f).expanduser() for f in + temp.model_file[key] = [pathlib.Path(f).expanduser() for f in model_file] # build model directory dictionaries temp.model_directory = {} @@ -1778,7 +1899,7 @@ def validate_format(self): self.format = m[1] # assert that tide model is a known format assert self.format in self.formats() - + def from_dict(self, d: dict): """ Create a model object from a python dictionary @@ -1846,7 +1967,7 @@ def parse_file( raise ValueError(f'Constituent not found in file {model_file}') else: return None - + @staticmethod def to_bool(val: str) -> bool: """ diff --git a/pyTMD/predict.py b/pyTMD/predict.py index 67691dd5..d9d7f70f 100644 --- a/pyTMD/predict.py +++ b/pyTMD/predict.py @@ -21,6 +21,9 @@ UPDATE HISTORY: Updated 08/2024: minor nodal angle corrections in radians to match arguments + include inference of eps2 and eta2 when predicting from GOT models + add keyword argument to allow inferring 
specific minor constituents + use nodal arguments for all non-OTIS model type cases Updated 07/2024: use normalize_angle from pyTMD astro module make number of days to convert tide time to MJD a variable Updated 02/2024: changed class name for ellipsoid parameters to datum @@ -114,7 +117,7 @@ def map(t: float | np.ndarray, pyTMD.arguments._constituent_parameters(c) # add component for constituent to output tidal elevation th = omega*t*86400.0 + ph + pu[0,k] - elif corrections in ('GOT', 'FES'): + else: th = G[0,k]*np.pi/180.0 + pu[0,k] # sum over all tides ht.data[:] += pf[0,k]*hc.real[:,k]*np.cos(th) - \ @@ -180,7 +183,7 @@ def drift(t: float | np.ndarray, pyTMD.arguments._constituent_parameters(c) # add component for constituent to output tidal elevation th = omega*t*86400.0 + ph + pu[:,k] - elif corrections in ('GOT', 'FES'): + else: th = G[:,k]*np.pi/180.0 + pu[:,k] # sum over all tides ht.data[:] += pf[:,k]*hc.real[:,k]*np.cos(th) - \ @@ -246,7 +249,7 @@ def time_series(t: float | np.ndarray, pyTMD.arguments._constituent_parameters(c) # add component for constituent to output tidal time series th = omega*t*86400.0 + ph + pu[:,k] - elif corrections in ('GOT', 'FES'): + else: th = G[:,k]*np.pi/180.0 + pu[:,k] # sum over all tides at location ht.data[:] += pf[:,k]*hc.real[0,k]*np.cos(th) - \ @@ -278,6 +281,8 @@ def infer_minor( time correction for converting to Ephemeris Time (days) corrections: str, default 'OTIS' use nodal corrections from OTIS/ATLAS or GOT/FES models + minor: list or None, default None + tidal constituent IDs Returns ------- @@ -304,6 +309,8 @@ def infer_minor( # set default keyword arguments kwargs.setdefault('deltat', 0.0) kwargs.setdefault('corrections', 'OTIS') + # list of minor constituents + kwargs.setdefault('minor', None) # number of constituents npts, nc = np.shape(zmajor) @@ -328,9 +335,10 @@ def infer_minor( raise Exception('Not enough constituents for inference') # list of minor constituents - minor = ['2q1', 'sigma1', 'rho1', 
'm1b', 'm1', 'chi1', 'pi1', - 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', 'nu2', 'lambda2', - 'l2', 'l2b', 't2', 'eps2', 'eta2'] + minor_constituents = ['2q1', 'sigma1', 'rho1', 'm1b', 'm1', + 'chi1', 'pi1', 'phi1', 'theta1', 'j1', 'oo1', '2n2', 'mu2', + 'nu2', 'lambda2', 'l2', 'l2b', 't2', 'eps2', 'eta2'] + minor = kwargs['minor'] or minor_constituents # only add minor constituents that are not on the list of major values minor_indices = [i for i,m in enumerate(minor) if m not in constituents] @@ -354,7 +362,7 @@ def infer_minor( zmin[:,15] = 0.0131*z[:,5] + 0.0326*z[:,6]# L2 zmin[:,16] = 0.0033*z[:,5] + 0.0082*z[:,6]# L2 zmin[:,17] = 0.0585*z[:,6]# t2 - # additional coefficients for FES models + # additional coefficients for FES and GOT models if kwargs['corrections'] in ('FES',): # spline coefficients for admittances mu2 = [0.069439968323, 0.351535557706, -0.046278307672] @@ -369,6 +377,9 @@ def infer_minor( zmin[:,17] = t2[0]*z[:,7] + t2[1]*z[:,4] + t2[2]*z[:,5]# t2 zmin[:,18] = 0.53285*z[:,8] - 0.03304*z[:,4]# eps2 zmin[:,19] = -0.0034925*z[:,5] + 0.0831707*z[:,7]# eta2 + elif kwargs['corrections'] in ('GOT',): + zmin[:,18] = 0.53285*z[:,8] - 0.03304*z[:,4]# eps2 + zmin[:,19] = -0.0034925*z[:,5] + 0.0831707*z[:,7]# eta2 # load the nodal corrections for minor constituents # convert time to Modified Julian Days (MJD) diff --git a/pyTMD/solve/constants.py b/pyTMD/solve/constants.py index feda3fda..22f0b0dc 100644 --- a/pyTMD/solve/constants.py +++ b/pyTMD/solve/constants.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" constants.py -Written by Tyler Sutterley (01/2024) +Written by Tyler Sutterley (08/2024) Routines for estimating the harmonic constants for ocean tides REFERENCES: @@ -20,6 +20,7 @@ astro.py: computes the basic astronomical mean longitudes UPDATE HISTORY: + Updated 08/2024: use nodal arguments for all non-OTIS model type cases Updated 01/2024: moved to solve subdirectory Written 12/2023 """ @@ -103,7 +104,7 @@ def constants(t: float | np.ndarray, 
amp, ph, omega, alpha, species = \ pyTMD.arguments._constituent_parameters(c) th = omega*t*86400.0 + ph + pu[:,k] - elif corrections in ('GOT', 'FES'): + else: th = G[:,k]*np.pi/180.0 + pu[:,k] # add constituent to design matrix M.append(pf[:,k]*np.cos(th)) diff --git a/pyTMD/spatial.py b/pyTMD/spatial.py index 4133c157..ea6276a1 100644 --- a/pyTMD/spatial.py +++ b/pyTMD/spatial.py @@ -712,7 +712,7 @@ def from_parquet(filename: str, **kwargs): dinput['x'] = shapely.get_x(geometry) dinput['y'] = shapely.get_y(geometry) # remap columns to default names - if kwargs['columns'] is not None: + if kwargs['columns'] is not None: field_mapping = default_field_mapping(kwargs['columns']) remap = inverse_mapping(field_mapping) dinput.rename(columns=remap, inplace=True) @@ -1201,7 +1201,7 @@ def to_parquet( srs.SetFromUserInput(json.dumps(kwargs['crs'])) # add projection information to attributes attributes['crs'] = copy.copy(kwargs['crs']) - # convert spatial coordinates to WKB encoded geometry + # convert spatial coordinates to WKB encoded geometry if kwargs['geoparquet'] and (kwargs['geometry_encoding'] == 'WKB'): # get geometry columns primary_column = kwargs['primary_column'] diff --git a/pyTMD/time.py b/pyTMD/time.py index 6d6243dc..eaf7cc6b 100644 --- a/pyTMD/time.py +++ b/pyTMD/time.py @@ -198,7 +198,7 @@ def convert_datetime(*args, **kwargs): warnings.warn("Deprecated. 
Please use timescale.time.convert_datetime", DeprecationWarning) return timescale.time.convert_datetime(*args, **kwargs) - + # PURPOSE: convert times from seconds since epoch1 to time since epoch2 def convert_delta_time(*args, **kwargs): """ diff --git a/pyTMD/utilities.py b/pyTMD/utilities.py index d8f100fb..dcea866e 100644 --- a/pyTMD/utilities.py +++ b/pyTMD/utilities.py @@ -102,7 +102,7 @@ def import_dependency( ): """ Import an optional dependency - + Adapted from ``pandas.compat._optional::import_optional_dependency`` Parameters @@ -899,7 +899,7 @@ def from_json( raise Exception(msg) from exc else: # load JSON response - return json.loads(response.read()) + return json.loads(response.read()) # PURPOSE: attempt to build an opener with netrc def attempt_login( diff --git a/scripts/aviso_fes_tides.py b/scripts/aviso_fes_tides.py index 16c9bfd6..732a0a65 100644 --- a/scripts/aviso_fes_tides.py +++ b/scripts/aviso_fes_tides.py @@ -248,7 +248,7 @@ def aviso_fes_list(MODEL, f, logger, remote_path = [*subdir, fi] LZMA = fi.endswith('.xz') ftp_download(logger, f, remote_path, local_dir, - LZMA=LZMA, + LZMA=LZMA, GZIP=GZIP, CHUNK=32768, MODE=MODE @@ -277,7 +277,7 @@ def ftp_list(ftp, remote_path, basename=False, pattern=None, sort=False): # PURPOSE: pull file from a remote ftp server and decompress if tar file def ftp_download(logger, ftp, remote_path, local_dir, LZMA=None, - TARMODE=None, + TARMODE=None, FLATTEN=None, GZIP=False, CHUNK=8192, diff --git a/scripts/compute_tidal_currents.py b/scripts/compute_tidal_currents.py index 1f056b8f..a865a30e 100755 --- a/scripts/compute_tidal_currents.py +++ b/scripts/compute_tidal_currents.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tidal_currents.py -Written by Tyler Sutterley (07/2024) +Written by Tyler Sutterley (08/2024) Calculates zonal and meridional tidal currents for an input file Uses OTIS format tidal solutions provided by Oregon State University and ESR @@ -57,7 +57,8 @@ -E X, --extrapolate X: Extrapolate 
with nearest-neighbors -c X, --cutoff X: Extrapolation cutoff in kilometers set to inf to extrapolate for all points - --infer-minor: Infer the current values for minor constituents + --infer-minor: Infer values for minor constituents + --minor-constituents: Minor constituents to infer -f X, --fill-value X: Invalid value for spatial fields -V, --verbose: Verbose output of processing run -M X, --mode X: Permission mode of output file @@ -97,6 +98,8 @@ predict.py: predict tidal values using harmonic constants UPDATE HISTORY: + Updated 08/2024: allow inferring only specific minor constituents + added option to try automatic detection of definition file format Updated 07/2024: assert that data type is a known value added option to crop to the domain of the input data added option to use JSON format definition files @@ -205,7 +208,7 @@ def compute_tidal_currents(tide_dir, input_file, output_file, ATLAS_FORMAT='ATLAS-netcdf', GZIP=True, DEFINITION_FILE=None, - DEFINITION_FORMAT='ascii', + DEFINITION_FORMAT='auto', CROP=False, FORMAT='csv', VARIABLES=[], @@ -220,6 +223,7 @@ def compute_tidal_currents(tide_dir, input_file, output_file, EXTRAPOLATE=False, CUTOFF=None, INFER_MINOR=False, + MINOR_CONSTITUENTS=None, FILL_VALUE=-9999.0, MODE=0o775): @@ -324,6 +328,8 @@ def compute_tidal_currents(tide_dir, input_file, output_file, # calculate constituent oscillation hc = amp*np.exp(cph) + # minor constituents to infer + minor_constituents = model.minor or MINOR_CONSTITUENTS # predict tidal currents at time if (TYPE == 'grid'): tide[t] = np.ma.zeros((ny,nx,nt), fill_value=FILL_VALUE) @@ -334,7 +340,8 @@ def compute_tidal_currents(tide_dir, input_file, output_file, # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide[i], hc, c, - deltat=deltat[i], corrections=corrections) + deltat=deltat[i], corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components and 
reform grid @@ -349,7 +356,8 @@ def compute_tidal_currents(tide_dir, input_file, output_file, # calculate values for minor constituents by inferrence if INFER_MINOR: minor = pyTMD.predict.infer_minor(ts.tide, hc, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) tide[t].data[:] += minor.data[:] elif (TYPE == 'time series'): tide[t] = np.ma.zeros((nstation,nt), fill_value=FILL_VALUE) @@ -362,7 +370,8 @@ def compute_tidal_currents(tide_dir, input_file, output_file, # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide, HC, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components @@ -481,7 +490,7 @@ def arguments(): type=pathlib.Path, help='Tide model definition file') parser.add_argument('--definition-format', - type=str, default='ascii', choices=('ascii', 'json'), + type=str, default='auto', choices=('ascii','json','auto'), help='Format for model definition file') # crop tide model to (buffered) bounds of data parser.add_argument('--crop', '-C', @@ -546,7 +555,11 @@ def arguments(): # infer minor constituents from major parser.add_argument('--infer-minor', default=False, action='store_true', - help='Infer the current values for minor constituents') + help='Infer values for minor constituents') + # specify minor constituents to infer + parser.add_argument('--minor-constituents', + type=str, nargs='+', + help='Minor constituents to infer') # fill value for output spatial fields parser.add_argument('--fill-value','-f', type=float, default=-9999.0, @@ -601,6 +614,7 @@ def main(): EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, INFER_MINOR=args.infer_minor, + MINOR_CONSTITUENTS=args.minor_constituents, FILL_VALUE=args.fill_value, MODE=args.mode) except Exception as exc: diff --git a/scripts/compute_tidal_elevations.py 
b/scripts/compute_tidal_elevations.py index 419e9607..0d404edf 100755 --- a/scripts/compute_tidal_elevations.py +++ b/scripts/compute_tidal_elevations.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tidal_elevations.py -Written by Tyler Sutterley (07/2024) +Written by Tyler Sutterley (08/2024) Calculates tidal elevations for an input file Uses OTIS format tidal solutions provided by Oregon State University and ESR @@ -58,7 +58,8 @@ -E X, --extrapolate X: Extrapolate with nearest-neighbors -c X, --cutoff X: Extrapolation cutoff in kilometers set to inf to extrapolate for all points - --infer-minor: Infer the height values for minor constituents + --infer-minor: Infer values for minor constituents + --minor-constituents: Minor constituents to infer --apply-flexure: Apply ice flexure scaling factor to height values Only valid for models containing flexure fields -f X, --fill-value X: Invalid value for spatial fields @@ -100,6 +101,8 @@ predict.py: predict tidal values using harmonic constants UPDATE HISTORY: + Updated 08/2024: allow inferring only specific minor constituents + added option to try automatic detection of definition file format Updated 07/2024: assert that data type is a known value added option to crop to the domain of the input data added option to use JSON format definition files @@ -183,7 +186,7 @@ def info(args): if hasattr(os, 'getppid'): logging.debug(f'parent process: {os.getppid():d}') logging.debug(f'process id: {os.getpid():d}') - + # PURPOSE: try to get the projection information for the input file def get_projection(attributes, PROJECTION): # coordinate reference system string from file @@ -224,6 +227,7 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, EXTRAPOLATE=False, CUTOFF=None, INFER_MINOR=False, + MINOR_CONSTITUENTS=None, APPLY_FLEXURE=False, FILL_VALUE=-9999.0, MODE=0o775): @@ -266,6 +270,7 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, crs1 = get_projection(attributes, PROJECTION) crs2 = 
pyproj.CRS.from_epsg(4326) transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True) + assert TYPE.lower() in ('grid', 'drift', 'time series') if (TYPE == 'grid'): ny, nx = (len(dinput['y']), len(dinput['x'])) gridx, gridy = np.meshgrid(dinput['x'], dinput['y']) @@ -331,8 +336,9 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, # calculate constituent oscillation hc = amp*np.exp(cph) + # minor constituents to infer + minor_constituents = model.minor or MINOR_CONSTITUENTS # predict tidal elevations at time - assert TYPE.lower() in ('grid', 'drift', 'time series') if (TYPE == 'grid'): tide = np.ma.zeros((ny,nx,nt), fill_value=FILL_VALUE) tide.mask = np.zeros((ny,nx,nt),dtype=bool) @@ -342,7 +348,8 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide[i], hc, c, - deltat=deltat[i], corrections=corrections) + deltat=deltat[i], corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components and reform grid @@ -356,7 +363,8 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, # calculate values for minor constituents by inferrence if INFER_MINOR: minor = pyTMD.predict.infer_minor(ts.tide, hc, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) tide.data[:] += minor.data[:] elif (TYPE == 'time series'): tide = np.ma.zeros((nstation,nt), fill_value=FILL_VALUE) @@ -369,7 +377,8 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, # calculate values for minor constituents by inferrence if INFER_MINOR: MINOR = pyTMD.predict.infer_minor(ts.tide, HC, c, - deltat=deltat, corrections=corrections) + deltat=deltat, corrections=corrections, + minor=minor_constituents) else: MINOR = np.ma.zeros_like(TIDE) # add major and minor components @@ -478,7 +487,7 @@ def arguments(): 
type=pathlib.Path, help='Tide model definition file') parser.add_argument('--definition-format', - type=str, default='ascii', choices=('ascii', 'json'), + type=str, default='auto', choices=('ascii','json','auto'), help='Format for model definition file') # crop tide model to (buffered) bounds of data parser.add_argument('--crop', '-C', @@ -543,7 +552,11 @@ def arguments(): # infer minor constituents from major parser.add_argument('--infer-minor', default=False, action='store_true', - help='Infer the height values for minor constituents') + help='Infer values for minor constituents') + # specify minor constituents to infer + parser.add_argument('--minor-constituents', + type=str, nargs='+', + help='Minor constituents to infer') # apply flexure scaling factors to height values parser.add_argument('--apply-flexure', default=False, action='store_true', @@ -604,6 +617,7 @@ def main(): CUTOFF=args.cutoff, APPLY_FLEXURE=args.apply_flexure, INFER_MINOR=args.infer_minor, + MINOR_CONSTITUENTS=args.minor_constituents, FILL_VALUE=args.fill_value, MODE=args.mode) except Exception as exc: diff --git a/test/test_arguments.py b/test/test_arguments.py index b5fbca6d..a7a3df99 100644 --- a/test/test_arguments.py +++ b/test/test_arguments.py @@ -33,7 +33,7 @@ def test_arguments(corrections): # use a random set of modified Julian days MJD = np.random.randint(58000, 61000, size=10) # set function for astronomical longitudes - ASTRO5 = True if corrections in ('GOT', 'FES') else False + ASTRO5 = corrections not in ('OTIS','ATLAS','TMD3','netcdf') # convert from Modified Julian Dates into Ephemeris Time s, h, p, omega, pp = pyTMD.astro.mean_longitudes(MJD, ASTRO5=ASTRO5) @@ -70,7 +70,7 @@ def test_arguments(corrections): arg[:,16] = t1 - h - 90.0 # p1 if corrections in ('OTIS', 'ATLAS', 'TMD3', 'netcdf'): arg[:,17] = t1 + 90.0 # s1 - elif corrections in ('GOT', 'FES'): + else: arg[:,17] = t1 + 180.0 # s1 (Doodson's phase) arg[:,18] = t1 + h + 90.0 # k1 arg[:,19] = t1 + 2.0*h - pp + 90.0 # 
psi1 @@ -149,7 +149,7 @@ def test_table(corrections): coef[:,16] = [1.0, 1.0, -2.0, 0.0, 0.0, 0.0, -1.0] # p1 if corrections in ('OTIS', 'ATLAS', 'TMD3', 'netcdf'): coef[:,17] = [1.0, 1.0, -1.0, 0.0, 0.0, 0.0, 1.0] # s1 - elif corrections in ('GOT', 'FES'): + else: coef[:,17] = [1.0, 1.0, -1.0, 0.0, 0.0, 0.0, 2.0] # s1 (Doodson's phase) coef[:,18] = [1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0] # k1 coef[:,19] = [1.0, 1.0, 1.0, 0.0, 0.0, -1.0, 1.0] # psi1 @@ -195,7 +195,7 @@ def test_table(corrections): coef[:,58] = [2.0, -3.0, 2.0, 1.0, 0.0, 0.0, 0.0] # eps2 # mean sea level coef[:,59] = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] # z0 - + # get Doodson coefficients test = pyTMD.arguments._arguments_table(corrections=corrections) # validate arguments between methods @@ -259,7 +259,7 @@ def test_nodal(corrections, M1): # use a random set of modified Julian days MJD = np.random.randint(58000, 61000, size=10) # set function for astronomical longitudes - ASTRO5 = True if corrections in ('GOT', 'FES') else False + ASTRO5 = corrections not in ('OTIS','ATLAS','TMD3','netcdf') # convert from Modified Julian Dates into Ephemeris Time s, h, p, n, pp = pyTMD.astro.mean_longitudes(MJD, ASTRO5=ASTRO5) diff --git a/test/test_model.py b/test/test_model.py index f25a8177..61a75a98 100644 --- a/test/test_model.py +++ b/test/test_model.py @@ -16,7 +16,7 @@ filename = inspect.getframeinfo(inspect.currentframe()).filename filepath = pathlib.Path(filename).absolute().parent -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_CATS2008(file_format): """Tests the reading of the CATS2008 model definition file """ @@ -24,6 +24,7 @@ def test_definition_CATS2008(file_format): definition_file = {} definition_file['ascii'] = 'model_CATS2008.def' definition_file['json'] = 'model_CATS2008.json' + definition_file['auto'] = 'model_CATS2008.json' val = definition_file[file_format] # read model definition file for format m = 
pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) @@ -45,7 +46,7 @@ def test_definition_CATS2008(file_format): assert m.gla12 == 'd_ocElv' assert m.long_name == 'ocean_tide_elevation' -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_FES(file_format): """Tests the reading of the FES2014 model definition file """ @@ -53,6 +54,7 @@ def test_definition_FES(file_format): definition_file = {} definition_file['ascii'] = 'model_FES2014.def' definition_file['json'] = 'model_FES2014.json' + definition_file['auto'] = 'model_FES2014.json' val = definition_file[file_format] # read model definition file for format m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) @@ -105,7 +107,7 @@ def test_definition_FES(file_format): assert m.long_name == 'ocean_tide_elevation' # PURPOSE: test glob file functionality -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_FES_glob(file_format): """Tests the reading of the FES2014 model definition file with glob file searching @@ -114,6 +116,7 @@ def test_definition_FES_glob(file_format): definition_file = {} definition_file['ascii'] = 'model_FES2014.def' definition_file['json'] = 'model_FES2014.json' + definition_file['auto'] = 'model_FES2014.def' val = definition_file[file_format] # read model definition file for format m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) @@ -145,7 +148,7 @@ def test_definition_FES_glob(file_format): fid = io.StringIO() glob_string = r'fes2014/ocean_tide/*.nc.gz' attrs = ['name','format','compressed','type','scale','version'] - if (file_format == 'ascii'): + if file_format in ('ascii', 'auto'): # create tab-delimited definition file for attr in attrs: val = getattr(m,attr) @@ -177,7 +180,7 @@ def test_definition_FES_glob(file_format): # clean up model 
shutil.rmtree(filepath.joinpath('fes2014')) -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_FES_currents(file_format): """Tests the reading of the FES2014 model definition file for currents """ @@ -185,6 +188,7 @@ def test_definition_FES_currents(file_format): definition_file = {} definition_file['ascii'] = 'model_FES2014_currents.def' definition_file['json'] = 'model_FES2014_currents.json' + definition_file['auto'] = 'model_FES2014_currents.json' val = definition_file[file_format] # read model definition file for format m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) @@ -250,7 +254,7 @@ def test_definition_FES_currents(file_format): assert m.long_name['v'] == 'meridional_tidal_current' # PURPOSE: test glob file functionality -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_FES_currents_glob(file_format): """Tests the reading of the FES2014 model definition file with glob file searching for currents @@ -259,6 +263,7 @@ def test_definition_FES_currents_glob(file_format): definition_file = {} definition_file['ascii'] = 'model_FES2014_currents.def' definition_file['json'] = 'model_FES2014_currents.json' + definition_file['auto'] = 'model_FES2014_currents.def' val = definition_file[file_format] # read model definition file for format m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) @@ -311,7 +316,7 @@ def test_definition_FES_currents_glob(file_format): attrs = ['name','format','compressed','type','scale','version'] glob_string_u = r'fes2014/eastward_velocity/*.nc.gz' glob_string_v = r'fes2014/northward_velocity/*.nc.gz' - if (file_format == 'ascii'): + if file_format in ('ascii','auto'): # create tab-delimited definition file for attr in attrs: val = getattr(m,attr) @@ -344,7 +349,7 @@ def 
test_definition_FES_currents_glob(file_format): # clean up model shutil.rmtree(filepath.joinpath('fes2014')) -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_GOT(file_format): """Tests the reading of the GOT4.10 model definition file """ @@ -352,6 +357,7 @@ def test_definition_GOT(file_format): definition_file = {} definition_file['ascii'] = 'model_GOT4.10.def' definition_file['json'] = 'model_GOT4.10.json' + definition_file['auto'] = 'model_GOT4.10.json' val = definition_file[file_format] # read model definition file for format m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) @@ -387,7 +393,7 @@ def test_definition_GOT(file_format): assert m.long_name == 'load_tide_elevation' # PURPOSE: test glob file functionality -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_GOT_glob(file_format): """Tests the reading of the GOT4.10 model definition file with glob file searching @@ -396,9 +402,10 @@ def test_definition_GOT_glob(file_format): definition_file = {} definition_file['ascii'] = 'model_GOT4.10.def' definition_file['json'] = 'model_GOT4.10.json' + definition_file['auto'] = 'model_GOT4.10.def' val = definition_file[file_format] # read model definition file for format - m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) # model files model_files = ['GOT4.10c/grids_loadtide/k1load.d.gz', 'GOT4.10c/grids_loadtide/k2load.d.gz', @@ -419,7 +426,7 @@ def test_definition_GOT_glob(file_format): fid = io.StringIO() attrs = ['name','format','compressed','type','scale'] glob_string = r'GOT4.10c/grids_loadtide/*.d.gz' - if (file_format == 'ascii'): + if file_format in ('ascii','auto'): # create tab-delimited definition file for attr in attrs: val = getattr(m,attr) @@ 
-449,7 +456,7 @@ def test_definition_GOT_glob(file_format): # clean up model shutil.rmtree(filepath.joinpath('GOT4.10c')) -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_TPXO9(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file """ @@ -457,9 +464,10 @@ def test_definition_TPXO9(file_format): definition_file = {} definition_file['ascii'] = 'model_TPXO9-atlas-v5.def' definition_file['json'] = 'model_TPXO9-atlas-v5.json' + definition_file['auto'] = 'model_TPXO9-atlas-v5.json' val = definition_file[file_format] # read model definition file for format - m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) # model files model_files = ['TPXO9_atlas_v5/h_2n2_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_k1_tpxo9_atlas_30_v5.nc', @@ -499,7 +507,7 @@ def test_definition_TPXO9(file_format): assert m.long_name == 'ocean_tide_elevation' # PURPOSE: test glob file functionality -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_TPXO9_glob(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file with glob file searching @@ -508,9 +516,10 @@ def test_definition_TPXO9_glob(file_format): definition_file = {} definition_file['ascii'] = 'model_TPXO9-atlas-v5.def' definition_file['json'] = 'model_TPXO9-atlas-v5.json' + definition_file['auto'] = 'model_TPXO9-atlas-v5.def' val = definition_file[file_format] # read model definition file for format - m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) # model files model_files = ['TPXO9_atlas_v5/h_2n2_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_k1_tpxo9_atlas_30_v5.nc', @@ -543,7 +552,7 @@ def 
test_definition_TPXO9_glob(file_format): fid = io.StringIO() attrs = ['name','format','compressed','type','scale'] glob_string = r'TPXO9_atlas_v5/h*.nc' - if (file_format == 'ascii'): + if file_format in ('ascii','auto'): # create tab-delimited definition file for attr in attrs: val = getattr(m,attr) @@ -575,7 +584,7 @@ def test_definition_TPXO9_glob(file_format): # clean up model shutil.rmtree(filepath.joinpath('TPXO9_atlas_v5')) -@pytest.mark.parametrize("file_format", ['ascii','json']) +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_TPXO9_currents(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file for currents """ @@ -583,9 +592,10 @@ def test_definition_TPXO9_currents(file_format): definition_file = {} definition_file['ascii'] = 'model_TPXO9-atlas-v5_currents.def' definition_file['json'] = 'model_TPXO9-atlas-v5_currents.json' + definition_file['auto'] = 'model_TPXO9-atlas-v5_currents.json' val = definition_file[file_format] # read model definition file for format - m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) # model files for each component model_files = {} model_files['u'] = ['TPXO9_atlas_v5/u_2n2_tpxo9_atlas_30_v5.nc', @@ -632,8 +642,8 @@ def test_definition_TPXO9_currents(file_format): assert m.long_name['u'] == 'zonal_tidal_current' assert m.long_name['v'] == 'meridional_tidal_current' -# PURPOSE: test glob file functionality -@pytest.mark.parametrize("file_format", ['ascii','json']) +# PURPOSE: test glob file functionalityfrom_dict +@pytest.mark.parametrize("file_format", ['ascii','json','auto']) def test_definition_TPXO9_currents_glob(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file for currents with glob file searching @@ -642,6 +652,7 @@ def test_definition_TPXO9_currents_glob(file_format): definition_file = {} definition_file['ascii'] = 
'model_TPXO9-atlas-v5_currents.def' definition_file['json'] = 'model_TPXO9-atlas-v5_currents.json' + definition_file['auto'] = 'model_TPXO9-atlas-v5_currents.def' val = definition_file[file_format] # read model definition file for format m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) @@ -692,7 +703,7 @@ def test_definition_TPXO9_currents_glob(file_format): attrs = ['name','format','compressed','type','scale'] glob_string_u = r'TPXO9_atlas_v5/u*.nc' glob_string_v = r'TPXO9_atlas_v5/u*.nc' - if (file_format == 'ascii'): + if file_format in ('ascii','auto'): # create tab-delimited definition file for attr in attrs: val = getattr(m,attr) diff --git a/test/test_perth3_read.py b/test/test_perth3_read.py index 5b8c809f..efdb26e8 100644 --- a/test/test_perth3_read.py +++ b/test/test_perth3_read.py @@ -17,6 +17,7 @@ UPDATE HISTORY: Updated 08/2024: increased tolerance for comparing with GOT4.7 tests as using nodal corrections from PERTH5 + use a reduced list of minor constituents to match GOT4.7 tests Updated 07/2024: add parametrize over cropping the model fields Updated 04/2024: use timescale for temporal operations Updated 01/2024: refactored compute functions into compute.py @@ -89,16 +90,18 @@ def download_model(aws_access_key_id,aws_secret_access_key,aws_region_name): # PURPOSE: Tests that interpolated results are comparable to PERTH3 program def test_verify_GOT47(METHOD, CROP): # model parameters for GOT4.7 - model_directory = filepath.joinpath('GOT4.7','grids_oceantide') + model = pyTMD.io.model(filepath,compressed=True).elevation('GOT4.7') # perth3 test program infers m4 tidal constituent model_files = ['q1.d.gz','o1.d.gz','p1.d.gz','k1.d.gz','n2.d.gz', 'm2.d.gz','s2.d.gz','k2.d.gz','s1.d.gz'] - model_file = [model_directory.joinpath(m) for m in model_files] + model.model_file = [model.model_directory.joinpath(m) for m in model_files] + # validate model constituents constituents = ['q1','o1','p1','k1','n2','m2','s2','k2','s1'] - 
model_format = 'GOT-ascii' - corrections, _, grid = model_format.partition('-') - GZIP = True - SCALE = 1.0 + model.parse_constituents() + assert model.constituents == constituents + corrections, _, grid = model.format.partition('-') + # keep amplitudes in centimeters for comparison with outputs + model.scale = 1.0 # read validation dataset with gzip.open(filepath.joinpath('perth_output_got4.7.gz'),'r') as fid: @@ -119,16 +122,16 @@ def test_verify_GOT47(METHOD, CROP): validation.data[i] = np.float64(line_contents[3]) validation.mask[i] = False - # convert time from MJD to days since 1992-01-01T00:00:00 - tide_time = MJD - 48622.0 + # convert time from MJD to timescale object + ts = timescale.time.Timescale(MJD=MJD) + # interpolate delta times + deltat = ts.tt_ut1 # extract amplitude and phase from tide model - amp,ph,cons = pyTMD.io.GOT.extract_constants(lon, lat, model_file, - grid=grid, method=METHOD, compressed=GZIP, scale=SCALE, crop=CROP) + amp,ph,cons = pyTMD.io.GOT.extract_constants(lon, lat, + model.model_file, grid=grid, method=METHOD, + compressed=model.compressed, scale=model.scale, crop=CROP) assert all(c in constituents for c in cons) - # interpolate delta times from calendar dates to tide time - deltat = timescale.time.interpolate_delta_time( - timescale.time._delta_file, tide_time) # calculate complex phase in radians for Euler's cph = -1j*ph*np.pi/180.0 # calculate constituent oscillations @@ -139,15 +142,14 @@ def test_verify_GOT47(METHOD, CROP): tide.mask = np.zeros((npts),dtype=bool) # predict tidal elevations at time and infer minor corrections tide.mask[:] = np.any(hc.mask, axis=1) - tide.data[:] = pyTMD.predict.drift(tide_time, hc, cons, - deltat=deltat, corrections=corrections) - minor = pyTMD.predict.infer_minor(tide_time, hc, cons, - deltat=deltat, corrections=corrections) + tide.data[:] = pyTMD.predict.drift(ts.tide, hc, cons, + deltat=deltat, corrections='perth3') + minor = pyTMD.predict.infer_minor(ts.tide, hc, cons, + deltat=deltat, 
corrections='perth3', minor=model.minor) tide.data[:] += minor.data[:] # will verify differences between model outputs are within tolerance - # since using newer nodal corrections than PERTH3: 5cm tolerance - eps = 0.05 + eps = 0.01 # calculate differences between perth3 and python version difference = np.ma.zeros((npts)) difference.data[:] = tide.data - validation @@ -160,15 +162,18 @@ def test_verify_GOT47(METHOD, CROP): # PURPOSE: Tests that interpolated results are comparable def test_compare_GOT47(METHOD): # model parameters for GOT4.7 - model_directory = filepath.joinpath('GOT4.7','grids_oceantide') + model = pyTMD.io.model(filepath,compressed=True).elevation('GOT4.7') # perth3 test program infers m4 tidal constituent model_files = ['q1.d.gz','o1.d.gz','p1.d.gz','k1.d.gz','n2.d.gz', 'm2.d.gz','s2.d.gz','k2.d.gz','s1.d.gz'] - model_file = [model_directory.joinpath(m) for m in model_files] - model_format = 'GOT-ascii' - corrections, _, grid = model_format.partition('-') - GZIP = True - SCALE = 1.0 + model.model_file = [model.model_directory.joinpath(m) for m in model_files] + # validate model constituents + constituents = ['q1','o1','p1','k1','n2','m2','s2','k2','s1'] + model.parse_constituents() + assert model.constituents == constituents + corrections, _, grid = model.format.partition('-') + # keep amplitudes in centimeters for comparison with outputs + model.scale = 1.0 # read validation dataset with gzip.open(filepath.joinpath('perth_output_got4.7.gz'),'r') as fid: @@ -183,15 +188,17 @@ def test_compare_GOT47(METHOD): lon[i] = np.float64(line_contents[1]) # extract amplitude and phase from tide model - amp1, ph1, c1 = pyTMD.io.GOT.extract_constants(lon, lat, model_file, - grid=grid, method=METHOD, compressed=GZIP, scale=SCALE) + amp1, ph1, c1 = pyTMD.io.GOT.extract_constants(lon, lat, + model.model_file, grid=grid, method=METHOD, + compressed=model.compressed, scale=model.scale) # calculate complex form of constituent oscillation hc1 = 
amp1*np.exp(-1j*ph1*np.pi/180.0) # read and interpolate constituents from tide model - constituents = pyTMD.io.GOT.read_constants(model_file, compressed=GZIP) + constituents = pyTMD.io.GOT.read_constants(model.model_file, + compressed=model.compressed) amp2, ph2 = pyTMD.io.GOT.interpolate_constants(lon, lat, - constituents, grid=grid, method=METHOD, scale=SCALE) + constituents, grid=grid, method=METHOD, scale=model.scale) # calculate complex form of constituent oscillation hc2 = amp2*np.exp(-1j*ph2*np.pi/180.0) diff --git a/test/test_solid_earth.py b/test/test_solid_earth.py index 0ba8e37f..a746b11a 100644 --- a/test/test_solid_earth.py +++ b/test/test_solid_earth.py @@ -370,11 +370,11 @@ def test_epochs(): """Test that the epoch conversions match expected outputs """ # Modified Julian Day (MJD) - assert np.isclose(pyTMD.astro._jd_mjd, 2400000.5) + assert np.isclose(pyTMD.astro._jd_mjd, 2400000.5) # J2000 time mjd_j2000 = timescale.time.convert_calendar_dates( *timescale.time._j2000_epoch, epoch=timescale.time._mjd_epoch) assert np.isclose(mjd_j2000, pyTMD.astro._mjd_j2000) - assert np.isclose(pyTMD.astro._mjd_j2000, 51544.5) + assert np.isclose(pyTMD.astro._mjd_j2000, 51544.5) assert np.isclose(pyTMD.astro._jd_j2000, 2451545.0)