
Commit

Use struct instead of global map
Lun4m committed Nov 18, 2024
1 parent 6143007 commit d5d4399
Showing 8 changed files with 60 additions and 46 deletions.
72 changes: 39 additions & 33 deletions migrations/kdvh/db/main.go
@@ -1,41 +1,47 @@
 package db
 
-// Map of all tables found in KDVH, with set max import year
-var KDVH map[string]*Table = map[string]*Table{
-	// Section 1: tables that need to be migrated entirely
-	// TODO: figure out if we need to use the elem_code_paramid_level_sensor_t_edata table?
-	"T_EDATA": NewTable("T_EDATA", "T_EFLAG", "T_ELEM_EDATA").SetImportYear(3000),
-	"T_METARDATA": NewTable("T_METARDATA", "", "T_ELEM_METARDATA").SetImportYear(3000),
+type KDVH struct {
+	Tables map[string]*Table
+}
+
+func Init() *KDVH {
+	return &KDVH{map[string]*Table{
+		// Section 1: tables that need to be migrated entirely
+		// TODO: figure out if we need to use the elem_code_paramid_level_sensor_t_edata table?
+		"T_EDATA": NewTable("T_EDATA", "T_EFLAG", "T_ELEM_EDATA").SetImportYear(3000),
+		"T_METARDATA": NewTable("T_METARDATA", "", "T_ELEM_METARDATA").SetImportYear(3000),
 
-	// Section 2: tables with some data in kvalobs, import only up to 2005-12-31
-	"T_ADATA": NewTable("T_ADATA", "T_AFLAG", "T_ELEM_OBS").SetImportYear(2006),
-	"T_MDATA": NewTable("T_MDATA", "T_MFLAG", "T_ELEM_OBS").SetImportYear(2006),
-	"T_TJ_DATA": NewTable("T_TJ_DATA", "T_TJ_FLAG", "T_ELEM_OBS").SetImportYear(2006),
-	"T_PDATA": NewTable("T_PDATA", "T_PFLAG", "T_ELEM_OBS").SetImportYear(2006),
-	"T_NDATA": NewTable("T_NDATA", "T_NFLAG", "T_ELEM_OBS").SetImportYear(2006),
-	"T_VDATA": NewTable("T_VDATA", "T_VFLAG", "T_ELEM_OBS").SetImportYear(2006),
-	"T_UTLANDDATA": NewTable("T_UTLANDDATA", "T_UTLANDFLAG", "T_ELEM_OBS").SetImportYear(2006),
+		// Section 2: tables with some data in kvalobs, import only up to 2005-12-31
+		"T_ADATA": NewTable("T_ADATA", "T_AFLAG", "T_ELEM_OBS").SetImportYear(2006),
+		"T_MDATA": NewTable("T_MDATA", "T_MFLAG", "T_ELEM_OBS").SetImportYear(2006),
+		"T_TJ_DATA": NewTable("T_TJ_DATA", "T_TJ_FLAG", "T_ELEM_OBS").SetImportYear(2006),
+		"T_PDATA": NewTable("T_PDATA", "T_PFLAG", "T_ELEM_OBS").SetImportYear(2006),
+		"T_NDATA": NewTable("T_NDATA", "T_NFLAG", "T_ELEM_OBS").SetImportYear(2006),
+		"T_VDATA": NewTable("T_VDATA", "T_VFLAG", "T_ELEM_OBS").SetImportYear(2006),
+		"T_UTLANDDATA": NewTable("T_UTLANDDATA", "T_UTLANDFLAG", "T_ELEM_OBS").SetImportYear(2006),
 
-	// Section 3: tables that should only be dumped
-	"T_10MINUTE_DATA": NewTable("T_10MINUTE_DATA", "T_10MINUTE_FLAG", "T_ELEM_OBS"),
-	"T_ADATA_LEVEL": NewTable("T_ADATA_LEVEL", "T_AFLAG_LEVEL", "T_ELEM_OBS"),
-	"T_MINUTE_DATA": NewTable("T_MINUTE_DATA", "T_MINUTE_FLAG", "T_ELEM_OBS"),
-	"T_SECOND_DATA": NewTable("T_SECOND_DATA", "T_SECOND_FLAG", "T_ELEM_OBS"),
-	"T_CDCV_DATA": NewTable("T_CDCV_DATA", "T_CDCV_FLAG", "T_ELEM_EDATA"),
-	"T_MERMAID": NewTable("T_MERMAID", "T_MERMAID_FLAG", "T_ELEM_EDATA"),
-	"T_SVVDATA": NewTable("T_SVVDATA", "T_SVVFLAG", "T_ELEM_OBS"),
+		// Section 3: tables that should only be dumped
+		"T_10MINUTE_DATA": NewTable("T_10MINUTE_DATA", "T_10MINUTE_FLAG", "T_ELEM_OBS"),
+		"T_ADATA_LEVEL": NewTable("T_ADATA_LEVEL", "T_AFLAG_LEVEL", "T_ELEM_OBS"),
+		"T_MINUTE_DATA": NewTable("T_MINUTE_DATA", "T_MINUTE_FLAG", "T_ELEM_OBS"),
+		"T_SECOND_DATA": NewTable("T_SECOND_DATA", "T_SECOND_FLAG", "T_ELEM_OBS"),
+		"T_CDCV_DATA": NewTable("T_CDCV_DATA", "T_CDCV_FLAG", "T_ELEM_EDATA"),
+		"T_MERMAID": NewTable("T_MERMAID", "T_MERMAID_FLAG", "T_ELEM_EDATA"),
+		"T_SVVDATA": NewTable("T_SVVDATA", "T_SVVFLAG", "T_ELEM_OBS"),
 
-	// Section 4: special cases, namely digitized historical data
-	"T_MONTH": NewTable("T_MONTH", "T_MONTH_FLAG", "T_ELEM_MONTH").SetImportYear(1957),
-	"T_DIURNAL": NewTable("T_DIURNAL", "T_DIURNAL_FLAG", "T_ELEM_DIURNAL").SetImportYear(2006),
-	"T_HOMOGEN_DIURNAL": NewTable("T_HOMOGEN_DIURNAL", "", "T_ELEM_HOMOGEN_MONTH"),
-	"T_HOMOGEN_MONTH": NewTable("T_HOMOGEN_MONTH", "T_ELEM_HOMOGEN_MONTH", ""),
+		// Section 4: special cases, namely digitized historical data
+		"T_MONTH": NewTable("T_MONTH", "T_MONTH_FLAG", "T_ELEM_MONTH").SetImportYear(1957),
+		"T_DIURNAL": NewTable("T_DIURNAL", "T_DIURNAL_FLAG", "T_ELEM_DIURNAL").SetImportYear(2006),
+		"T_HOMOGEN_DIURNAL": NewTable("T_HOMOGEN_DIURNAL", "", "T_ELEM_HOMOGEN_MONTH"),
+		"T_HOMOGEN_MONTH": NewTable("T_HOMOGEN_MONTH", "T_ELEM_HOMOGEN_MONTH", ""),
 
-	// Section 5: tables missing in the KDVH proxy:
-	// 1. these exist in a separate database
-	"T_AVINOR": NewTable("T_AVINOR", "T_AVINOR_FLAG", "T_ELEM_OBS"),
-	"T_PROJDATA": NewTable("T_PROJDATA", "T_PROJFLAG", "T_ELEM_PROJ"),
-	// 2. these are not in active use and don't need to be imported in LARD
-	"T_DIURNAL_INTERPOLATED": NewTable("T_DIURNAL_INTERPOLATED", "", ""),
-	"T_MONTH_INTERPOLATED": NewTable("T_MONTH_INTERPOLATED", "", ""),
+		// Section 5: tables missing in the KDVH proxy:
+		// 1. these exist in a separate database
+		"T_AVINOR": NewTable("T_AVINOR", "T_AVINOR_FLAG", "T_ELEM_OBS"),
+		"T_PROJDATA": NewTable("T_PROJDATA", "T_PROJFLAG", "T_ELEM_PROJ"),
+		// 2. these are not in active use and don't need to be imported in LARD
+		"T_DIURNAL_INTERPOLATED": NewTable("T_DIURNAL_INTERPOLATED", "", ""),
+		"T_MONTH_INTERPOLATED": NewTable("T_MONTH_INTERPOLATED", "", ""),
+	}}
 }
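
Editor's note: with the package-level `var KDVH` map gone, every consumer constructs the registry via `db.Init()` and ranges over its `Tables` field, as the call-site diffs below show. A minimal sketch of that new usage pattern in Go (the `selected` allow-list and the printed message are illustrative only; the import path, `Init`, `Tables`, and `TableName` are taken from this commit):

package main

import (
	"fmt"
	"slices"

	"migrate/kdvh/db"
)

func main() {
	// Build the table registry explicitly instead of reading the old global db.KDVH map.
	kdvh := db.Init()

	// Optional allow-list, mirroring the filter used by the dump and import commands below.
	selected := []string{"T_EDATA", "T_MDATA"}

	for name, table := range kdvh.Tables {
		if selected != nil && !slices.Contains(selected, table.TableName) {
			continue
		}
		fmt.Println("would process table:", name)
	}
}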
3 changes: 2 additions & 1 deletion migrations/kdvh/dump/main.go
@@ -29,7 +29,8 @@ func (config *DumpConfig) Execute([]string) error {
 		return nil
 	}
 
-	for _, table := range db.KDVH {
+	kdvh := db.Init()
+	for _, table := range kdvh.Tables {
 		if config.Tables != nil && !slices.Contains(config.Tables, table.TableName) {
 			continue
 		}
4 changes: 2 additions & 2 deletions migrations/kdvh/import/cache/kdvh.go
@@ -32,7 +32,7 @@ type Timespan struct {
 	ToTime *time.Time `db:"tdato"`
 }
 
-func cacheKDVH(tables, stations, elements []string) KDVHMap {
+func cacheKDVH(tables, stations, elements []string, kdvh *db.KDVH) KDVHMap {
 	cache := make(KDVHMap)
 
 	fmt.Println("Connecting to KDVH proxy to cache metadata")
@@ -46,7 +46,7 @@ func cacheKDVH(tables, stations, elements []string) KDVHMap {
 	}
 	defer conn.Close(context.TODO())
 
-	for _, t := range db.KDVH {
+	for _, t := range kdvh.Tables {
 		if tables != nil && !slices.Contains(tables, t.TableName) {
 			continue
 		}
9 changes: 5 additions & 4 deletions migrations/kdvh/import/cache/main.go
@@ -12,6 +12,7 @@ import (
 	"github.com/jackc/pgx/v5/pgxpool"
 	"github.com/rickb777/period"
 
+	"migrate/kdvh/db"
 	"migrate/lard"
 )
 
@@ -23,9 +24,9 @@ type Cache struct {
 	StationPermits StationPermitMap
 }
 
-// Caches all the metadata needed for import.
+// Caches all the metadata needed for import of KDVH tables.
 // If any error occurs inside here the program will exit.
-func CacheMetadata(tables, stations, elements []string) *Cache {
+func CacheMetadata(tables, stations, elements []string, kdvh *db.KDVH) *Cache {
 	fmt.Println("Connecting to Stinfosys to cache metadata")
 	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
 	defer cancel()
@@ -36,7 +37,7 @@ func CacheMetadata(tables, stations, elements []string) *Cache {
 		os.Exit(1)
 	}
 
-	stinfoMeta := cacheStinfoMeta(tables, elements, conn)
+	stinfoMeta := cacheStinfoMeta(tables, elements, kdvh, conn)
 	stationPermits := cacheStationPermits(conn)
 	paramPermits := cacheParamPermits(conn)
 
@@ -47,7 +48,7 @@ func CacheMetadata(tables, stations, elements []string) *Cache {
 		StationPermits: stationPermits,
 		ParamPermits: paramPermits,
 		Offsets: cacheParamOffsets(),
-		KDVH: cacheKDVH(tables, stations, elements),
+		KDVH: cacheKDVH(tables, stations, elements, kdvh),
 	}
 }
 
4 changes: 2 additions & 2 deletions migrations/kdvh/import/cache/stinfosys.go
@@ -32,10 +32,10 @@ type StinfoParam struct {
 }
 
 // Save metadata for later use by quering Stinfosys
-func cacheStinfoMeta(tables, elements []string, conn *pgx.Conn) StinfoMap {
+func cacheStinfoMeta(tables, elements []string, kdvh *db.KDVH, conn *pgx.Conn) StinfoMap {
 	cache := make(StinfoMap)
 
-	for _, table := range db.KDVH {
+	for _, table := range kdvh.Tables {
 		if tables != nil && !slices.Contains(tables, table.TableName) {
 			continue
 		}
6 changes: 4 additions & 2 deletions migrations/kdvh/import/main.go
@@ -32,8 +32,10 @@ func (config *Config) Execute([]string) error {
 		os.Exit(1)
 	}
 
+	kdvh := db.Init()
+
 	// Cache metadata from Stinfosys, KDVH, and local `product_offsets.csv`
-	cache := cache.CacheMetadata(config.Tables, config.Stations, config.Elements)
+	cache := cache.CacheMetadata(config.Tables, config.Stations, config.Elements, kdvh)
 
 	// Create connection pool for LARD
 	pool, err := pgxpool.New(context.TODO(), os.Getenv("LARD_STRING"))
@@ -43,7 +45,7 @@ func (config *Config) Execute([]string) error {
 	}
 	defer pool.Close()
 
-	for _, table := range db.KDVH {
+	for _, table := range kdvh.Tables {
 		if config.Tables != nil && !slices.Contains(config.Tables, table.TableName) {
 			continue
 		}
4 changes: 3 additions & 1 deletion migrations/kdvh/kdvh_test.go
@@ -60,12 +60,14 @@ func TestImportKDVH(t *testing.T) {
 		{table: "T_MDATA", station: 12345, elem: "TA", permit: 1, expectedRows: 2644}, // open TS
 	}
 
+	kdvh := db.Init()
+
 	// TODO: test does not fail, if flags are not inserted
 	// TODO: bar does not work well with log print outs
 	for _, c := range testCases {
 		config, cache := c.mockConfig()
 
-		table, ok := db.KDVH[c.table]
+		table, ok := kdvh.Tables[c.table]
 		if !ok {
 			t.Fatal("Table does not exist in database")
 		}
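
Editor's note on the test above: because the registry is now an ordinary value rather than a package-level map, a test could in principle build its own trimmed-down *db.KDVH instead of calling db.Init() and looking the table up by name. This is not part of the commit, only a sketch of what the struct makes possible (the helper name testKDVH and the chosen table are hypothetical; the exported constructors are those shown in db/main.go):

// Hypothetical helper (not in this commit): a registry containing only the
// tables a test case needs, built from the exported db constructors.
func testKDVH() *db.KDVH {
	return &db.KDVH{Tables: map[string]*db.Table{
		"T_MDATA": db.NewTable("T_MDATA", "T_MFLAG", "T_ELEM_OBS").SetImportYear(2006),
	}}
}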
4 changes: 3 additions & 1 deletion migrations/kdvh/list/main.go
@@ -12,8 +12,10 @@ type Config struct{}
 func (config *Config) Execute(_ []string) error {
 	fmt.Println("Available tables in KDVH:")
 
+	kdvh := db.Init()
+
 	var tables []string
-	for table := range db.KDVH {
+	for table := range kdvh.Tables {
 		tables = append(tables, table)
 	}
 
