From 9c168a6a58854c374cd4c7b13b23cba28adeb7e2 Mon Sep 17 00:00:00 2001 From: Bartosz Czernecki Date: Sat, 1 Apr 2023 15:48:48 +0200 Subject: [PATCH] Dev (#83) climate ver. 1.1.1 rc_1 --- .github/workflows/R-CMD-check.yaml | 4 +- .github/workflows/pkgdown.yaml | 47 ++++--- DESCRIPTION | 4 +- NEWS.md | 6 + R/clean_metadata_meteo.R | 14 +- R/meteo_metadata_imgw.R | 8 +- R/meteo_shortening_imgw.R | 16 ++- R/ogimet_daily.R | 42 +++--- R/onAttach.R | 2 +- R/sounding_wyoming.R | 11 +- README.md | 26 ++-- tests/testthat/test-meteo_metadata_imgw.R | 29 ++-- tests/testthat/test-meteo_ogimet.R | 25 ++-- vignettes/articles/usecase_outdated.txt | 155 ---------------------- 14 files changed, 135 insertions(+), 254 deletions(-) delete mode 100644 vignettes/articles/usecase_outdated.txt diff --git a/.github/workflows/R-CMD-check.yaml b/.github/workflows/R-CMD-check.yaml index 0f145b01..c6efaa83 100644 --- a/.github/workflows/R-CMD-check.yaml +++ b/.github/workflows/R-CMD-check.yaml @@ -43,7 +43,7 @@ jobs: steps: - uses: actions/checkout@v2 - - uses: r-lib/actions/setup-r@v1 + - uses: r-lib/actions/setup-r@v2 with: r-version: ${{ matrix.config.r }} http-user-agent: ${{ matrix.config.http-user-agent }} @@ -52,7 +52,7 @@ jobs: - name: Query dependencies run: | - install.packages('remotes') + install.packages(c('remotes', 'covr')) saveRDS(remotes::dev_package_deps(dependencies = TRUE), ".github/depends.Rds", version = 2) writeLines(sprintf("R-%i.%i", getRversion()$major, getRversion()$minor), ".github/R-version") shell: Rscript {0} diff --git a/.github/workflows/pkgdown.yaml b/.github/workflows/pkgdown.yaml index ef441d1a..2a3cc7a8 100644 --- a/.github/workflows/pkgdown.yaml +++ b/.github/workflows/pkgdown.yaml @@ -1,39 +1,46 @@ # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help -name: pkgdown on: push: - branches: [main, master, dev, devel] + branches: [main, master, dev] pull_request: - branches: [main, master, dev, devel] + branches: [main, master, dev] release: types: [published] workflow_dispatch: +name: pkgdown + jobs: - build: + pkgdown: runs-on: ubuntu-latest - container: bczernecki/meteo:latest + # Only restrict concurrency for non-PR jobs + concurrency: + group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }} env: GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Checkout Project - uses: actions/checkout@v1 - - - name: Checked for installed packages - run: | - sudo apt-get install -y libfontconfig1-dev libharfbuzz-dev libfribidi-dev rsync - R -e 'install.packages(c("pkgdown", "openair", "rnaturalearthdata"))' - R -e 'installed.packages()[, 1:3]' - - - name: Build book - run: | - Rscript -e 'pkgdown::build_site()' + - uses: actions/checkout@v3 + + - uses: r-lib/actions/setup-pandoc@v2 + + - uses: r-lib/actions/setup-r@v2 + with: + use-public-rspm: true + + - uses: r-lib/actions/setup-r-dependencies@v2 + with: + extra-packages: any::pkgdown, any::openair, local::. 
+ needs: website + + - name: Build site + run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) + shell: Rscript {0} - name: Deploy to GitHub pages 🚀 - uses: JamesIves/github-pages-deploy-action@4.1.4 + if: github.event_name != 'pull_request' + uses: JamesIves/github-pages-deploy-action@v4.4.1 with: clean: false branch: gh-pages - folder: docs \ No newline at end of file + folder: docs diff --git a/DESCRIPTION b/DESCRIPTION index f09ce9b8..c73d0a16 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,6 +1,6 @@ Package: climate Title: Interface to Download Meteorological (and Hydrological) Datasets -Version: 1.1.0 +Version: 1.1.1 Authors@R: c(person(given = "Bartosz", family = "Czernecki", role = c("aut", "cre"), @@ -27,7 +27,7 @@ License: MIT + file LICENSE Encoding: UTF-8 LazyData: true Roxygen: list(markdown = TRUE) -RoxygenNote: 7.2.1 +RoxygenNote: 7.2.3 Depends: R (>= 3.5.0) Imports: diff --git a/NEWS.md b/NEWS.md index 54e3bfe3..274b7d4d 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,9 @@ +# climate 1.1.1 + +* Fix problems with downloading `precip` dataset from IMGW-PIB repository after recent changes in metadata +* Bug fix for `ogimet_daily` if data contains more than one year + + # climate 1.1.0 * A new approach for handling CRAN policy for resolving problems if network issues are detected or some of the external services are temporarily down. diff --git a/R/clean_metadata_meteo.R b/R/clean_metadata_meteo.R index 2f7f0714..8821d4c4 100644 --- a/R/clean_metadata_meteo.R +++ b/R/clean_metadata_meteo.R @@ -14,8 +14,7 @@ clean_metadata_meteo = function(address, rank = "synop", interval = "hourly") { temp = tempfile() test_url(link = address, output = temp) a = readLines(temp, warn = FALSE) - - a = iconv(a, from = "cp1250", to = "ASCII//TRANSLIT") + a = iconv(a, from = "CP1250", to = "ASCII//TRANSLIT") a = gsub(a, pattern = "\\?", replacement = "") # additional workarounds for mac os but not only... 
@@ -27,10 +26,10 @@ clean_metadata_meteo = function(address, rank = "synop", interval = "hourly") {
   #                             fileEncoding = "CP1250", stringsAsFactors = FALSE)))
   length_char = max(nchar(a$V1), na.rm = TRUE)
 
-  if (rank == "precip" && interval == "hourly") length_char = 40 # wyjatek dla precipow
-  if (rank == "precip" && interval == "daily") length_char = 40 # wyjatek dla precipow dobowych
-  if (rank == "synop" && interval == "hourly") length_char = 60 # wyjatek dla synopow terminowych
-  if (rank == "climate" && interval == "monthly") length_char = 52 # wyjatek dla synopow terminowych
+  if (rank == "precip" && interval == "hourly") length_char = 40 # exception for precip / hourly
+  if (rank == "precip" && interval == "daily") length_char = 38 # exception for precip / daily
+  if (rank == "synop" && interval == "hourly") length_char = 60 # exception for synop / hourly
+  if (rank == "climate" && interval == "monthly") length_char = 52 # exception for climate / monthly
 
   field = substr(a$V1, length_char - 3, length_char)
 
@@ -43,8 +42,9 @@ clean_metadata_meteo = function(address, rank = "synop", interval = "hourly") {
   a$field2 = suppressWarnings(as.numeric(unlist(lapply(strsplit(field, "/"), function(x) x[2]))))
 
   a$V1 = trimws(substr(a$V1, 1, nchar(a$V1) - 3))
+  a$V1 = gsub(x = a$V1, pattern = "* ", "")
 
-  strsplit(x = a$V1, split = "/")
+  #strsplit(x = a$V1, split = "/")
   #a = a[nchar(a$V1)>2,] # remove empty or almost empty rows
   a = a[!(is.na(a$field1) & is.na(a$field2)), ] # remove info about status
   colnames(a)[1] = "parameters"
diff --git a/R/meteo_metadata_imgw.R b/R/meteo_metadata_imgw.R
index 696fc689..6a932627 100644
--- a/R/meteo_metadata_imgw.R
+++ b/R/meteo_metadata_imgw.R
@@ -14,16 +14,16 @@
 #'   #meta = climate:::meteo_metadata_imgw(interval = "monthly", rank = "precip")
 #' }
 
-meteo_metadata_imgw = function(interval, rank) { # interval moze byc: monthly, hourly, hourly
+meteo_metadata_imgw = function(interval, rank) { # interval can be: daily, monthly, hourly
 
   b = NULL
   base_url = "https://danepubliczne.imgw.pl/data/dane_pomiarowo_obserwacyjne/"
 
-  # METADANE daily:
-  if (interval == "daily") { # uwaga! daily maja dla climateow i synopow po 2 pliki z metadanymi!!!
+  # METADATA daily:
+  if (interval == "daily") { # warning! daily data for the climate and synop ranks have 2 metadata files each!
     if (rank == "synop") {
-      b[[1]] = clean_metadata_meteo(address = paste0(base_url, "dane_meteorologiczne/dobowe/synop/s_d_format.txt"),
+      b[[1]] = clean_metadata_meteo(address = paste0(base_url,"dane_meteorologiczne/dobowe/synop/s_d_format.txt"),
                                     rank = "synop", interval = "daily")
       b[[2]] = clean_metadata_meteo(address = paste0(base_url, "dane_meteorologiczne/dobowe/synop/s_d_t_format.txt"),
                                     rank = "synop", interval = "daily")
diff --git a/R/meteo_shortening_imgw.R b/R/meteo_shortening_imgw.R
index 24f9c3c8..3b8c9183 100644
--- a/R/meteo_shortening_imgw.R
+++ b/R/meteo_shortening_imgw.R
@@ -20,6 +20,17 @@ meteo_shortening_imgw = function(data, col_names = "short", remove_duplicates =
                                  TRUE) {
 
+  # removing duplicated column names: (e.g. station's name)
+  if (remove_duplicates == TRUE) {
+    data = data[, !duplicated(colnames(data))]
+
+    # fix for merged station names with suffixes
+    if (any(colnames(data) %in% c("Nazwa stacji.x", "Nazwa stacji.y"))) {
+      data$`Nazwa stacji.y` = NULL
+      colnames(data)[colnames(data) == "Nazwa stacji.x"] = "Nazwa stacji"
+    }
+  }
+
   if (col_names != "polish") {
     abbrev = climate::imgw_meteo_abbrev
     orig_columns = trimws(gsub("\\s+", " ", colnames(data))) # remove double spaces
@@ -38,11 +49,6 @@ meteo_shortening_imgw = function(data, col_names = "short", remove_duplicates =
     }
   }
 
-  # removing duplicated column names: (e.g. station's name)
-  if (remove_duplicates == TRUE) {
-    data = data[, !duplicated(colnames(data))]
-  }
-
   return(data)
 
 } # end of function
diff --git a/R/ogimet_daily.R b/R/ogimet_daily.R
index 77d7a5ca..063b9219 100644
--- a/R/ogimet_daily.R
+++ b/R/ogimet_daily.R
@@ -21,8 +21,6 @@
 #' }
 #'
 
-
-
 ogimet_daily = function(date = c(Sys.Date() - 30, Sys.Date()),
                         coords = FALSE,
                         station = NA,
@@ -95,14 +93,17 @@ ogimet_daily_bp = function(date = date,
     month = format(dates[i], "%m")
     day = format(dates[i], "%d")
     ndays = day
+    linkpl2 = paste("https://www.ogimet.com/cgi-bin/gsynres?lang=en&ind=", station_nr, "&ndays=32&ano=", year, "&mes=", month, "&day=", day, "&hora=", hour,"&ord=REV&Send=Send", sep = "")
 
-    if (month == 1) linkpl2 = paste("https://www.ogimet.com/cgi-bin/gsynres?lang=en&ind=", station_nr, "&ndays=32&ano=", year, "&mes=", month, "&day=", day, "&hora=", hour, "&ord=REV&Send=Send", sep = "")
-
     temp = tempfile()
     test_url(linkpl2, temp)
-
-    # run only if downloaded file is valid
-    if (!is.na(file.size(temp)) & (file.size(temp) > 500)) {
+    if (is.na(file.size(temp)) | (file.size(temp) < 500)) {
+      message("Problem with downloading data from:", linkpl2, "\n")
+      if (exists("data_station")) {
+        message("Returning results downloaded up to this point:\n")
+        return(data_station)
+      }
+    } else { # run only if downloaded file is valid
       a = readHTMLTable(temp, stringsAsFactors = FALSE)
       unlink(temp)
 
@@ -124,7 +125,8 @@ ogimet_daily_bp = function(date = date,
       test = b[1:2, ]
 
       if (is.null(test) ) {
-        warning(paste0("Wrong station ID: ", station_nr, " You can check available stations ID at https://ogimet.com/display_stations.php?lang=en&tipo=AND&isyn=&oaci=&nombre=&estado=&Send=Send"))
+        warning(paste0("Wrong station ID: ", station_nr,
+                       " You can check available station IDs at https://ogimet.com/display_stations.php?lang=en&tipo=AND&isyn=&oaci=&nombre=&estado=&Send=Send"))
         return(data_station)
       }
 
@@ -168,7 +170,7 @@ ogimet_daily_bp = function(date = date,
         names_col = "Error_column"
       }
 
-      names_col <-
+      names_col =
         gsub("[^A-Za-z0-9]", "",
              as.character(lapply(names_col, as.character), stringsAsFactors = FALSE))
 
@@ -177,10 +179,17 @@ ogimet_daily_bp = function(date = date,
       b = b[-c(1:2), ]
       b["station_ID"] = station_nr
 
-      # adding year to date
-      b$Date = as.character(paste0(b$Date, "/", year))
-
-
+      # extra check if dates cover December and January simultaneously
+      # e.g. "01/02" "01/01" "12/31" "12/30"
+      uniq_mths = sort(unique(unlist(lapply(strsplit(b$Date, "/"), "[[", 1))))
+      if (sum(uniq_mths %in% c("01", "12")) == 2) {
+        mth = unlist(lapply(strsplit(b$Date, "/"), "[[", 1))
+        yr = ifelse(mth == "01", as.numeric(year), as.numeric(year) - 1)
+        b$Date = as.character(paste0(b$Date, "/", yr))
+      } else {
+        b$Date = as.character(paste0(b$Date, "/", year))
+      }
+      # to avoid gtools::smartbind function or similar from another package
if (ncol(data_station) >= ncol(b)) { b[setdiff(names(data_station), names(b))] = NA # adding missing columns @@ -196,9 +205,6 @@ ogimet_daily_bp = function(date = date, } - # cat(paste(year,month,"\n")) - # coords można lepiej na samym koncu dodać kolumne - # wtedy jak zmienia się lokalizacja na dacie to tutaj tez if (coords) { coord = a[[1]][2, 1] data_station["Lon"] = get_coord_from_string(coord, "Longitude") @@ -247,8 +253,10 @@ ogimet_daily_bp = function(date = date, data_station$Date = as.Date(as.character(data_station$Date), format = "%m/%d/%Y") # clipping to interesting period as we're downloading slightly more than needed: data_station = data_station[which(data_station$Date >= as.Date(min(date)) & as.Date(data_station$Date) <= as.Date(max(date))), ] - + } # end of checking whether no. of rows > 0 + # removing duplicates: + data_station = data_station[row.names(unique(data_station[, c("station_ID", "Date")])), ] return(data_station) } diff --git a/R/onAttach.R b/R/onAttach.R index 3438237c..b4a22711 100644 --- a/R/onAttach.R +++ b/R/onAttach.R @@ -8,7 +8,7 @@ ver = as.character(packageVersion("climate")) packageStartupMessage(paste0(c("\n____________________________________________________________________\n", " Welcome to climate ", ver, "!", - "\n- More about the package and datasets: github.com/bczernecki/climate", + "\n- More about the package and datasets: bczernecki.github.io/climate", "\n- Using 'climate' for publication? See: citation('climate')\n", "____________________________________________________________________\n"))) } diff --git a/R/sounding_wyoming.R b/R/sounding_wyoming.R index 80c27806..03eb18de 100644 --- a/R/sounding_wyoming.R +++ b/R/sounding_wyoming.R @@ -100,13 +100,15 @@ sounding_wyoming_bp = function(wmo_id, dd = formatC(dd, width = 2, format = "d", flag = "0") hh = formatC(hh, width = 2, format = "d", flag = "0") min = formatC(min, width = 2, format = "d", flag = "0") - + if (bufr) { - url = paste0("http://weather.uwyo.edu/cgi-bin/bufrraob.py?datetime=", - yy, "-", mm, "-", dd, "+", hh, ":", min, ":00&id=", wmo_id, "&type=TEXT:LIST") + url = paste0("http://weather.uwyo.edu/cgi-bin/bufrraob.py?src=bufr&datetime=", + yy, "-", mm, "-", dd, "+", hh, ":", min, ":00&id=", + sprintf("%05d", wmo_id), "&type=TEXT:LIST") } else { url = paste0("http://weather.uwyo.edu/cgi-bin/sounding?TYPE=TEXT%3ALIST&YEAR=", - yy, "&MONTH=", mm, "&FROM=", dd, hh, "&TO=", dd, hh, "&STNM=", wmo_id) + yy, "&MONTH=", mm, "&FROM=", dd, hh, "&TO=", dd, hh, "&STNM=", + sprintf("%05d", wmo_id)) } temp = tempfile() @@ -154,4 +156,3 @@ sounding_wyoming_bp = function(wmo_id, unlink(temp) return(df) } - diff --git a/README.md b/README.md index f2e64eda..520c37eb 100644 --- a/README.md +++ b/README.md @@ -248,27 +248,29 @@ ggplot(co2, aes(date, co2_avg)) + ```python # load required packages from rpy2.robjects.packages import importr -from rpy2.robjects import r import rpy2.robjects as robjects import pandas as pd +import datetime as dt # load climate package (make sure that it was installed in R before) importr('climate') # test functionality e.g. 
with meteo_ogimet function for New York - La Guardia:
-df = robjects.r['meteo_ogimet'](interval = "daily", station = 72503)
-# optionally - transform object to pandas data frame and rename columns:
+df = robjects.r['meteo_ogimet'](interval = "daily", station = 72503,
+                                date = robjects.StrVector(['2022-05-01', '2022-06-15']))
+# optionally - transform object to pandas data frame and rename columns + fix datetime:
 res = pd.DataFrame(df).transpose()
 res.columns = df.colnames
+res['Date'] = pd.TimedeltaIndex(res['Date'], unit='d') + dt.datetime(1970,1,1)
+res.head()
+
+>>> res[res.columns[0:7]].head()
+#   station_ID       Date  TemperatureCAvg  ...  TemperatureCMin  TdAvgC  HrAvg
+#0     72503.0 2022-06-15             23.5  ...             19.4    10.9   45.2
+#1     72503.0 2022-06-14             25.0  ...             20.6    16.1   59.0
+#2     72503.0 2022-06-13             20.4  ...             17.8    16.0   74.8
+#3     72503.0 2022-06-12             21.3  ...             18.3    12.0   57.1
+#4     72503.0 2022-06-11             22.6  ...             17.8     8.1   40.1
 
->>> res
-#   station_ID     Date  TemperatureCAvg
-#0     72503.0  19227.0             24.7
-#1     72503.0  19226.0             25.1
-#2     72503.0  19225.0             27.5
-#3     72503.0  19224.0             26.8
-#4     72503.0  19223.0             24.7
-#5     72503.0  19222.0             23.3
-#[178 rows x 23 columns]
 ```
 
 ## Acknowledgment
diff --git a/tests/testthat/test-meteo_metadata_imgw.R b/tests/testthat/test-meteo_metadata_imgw.R
index eb146420..d82d2dd9 100644
--- a/tests/testthat/test-meteo_metadata_imgw.R
+++ b/tests/testthat/test-meteo_metadata_imgw.R
@@ -1,7 +1,7 @@
 context("meteo-metadata")
 
 test_that("tests to be re-written meteo_metadata_imgw", {
-  skip("meteo-metadata skipping")
+  #skip("meteo-metadata skipping")
   m_hs <- meteo_metadata_imgw("hourly", "synop")
   m_hc <- meteo_metadata_imgw("hourly", "climate")
   m_ds <- meteo_metadata_imgw("daily", "synop")
@@ -10,21 +10,18 @@ test_that("tests to be re-written meteo_metadata_imgw", {
   m_ms <- meteo_metadata_imgw("monthly", "synop")
   m_mc <- meteo_metadata_imgw("monthly", "climate")
   m_mp <- meteo_metadata_imgw("monthly", "precip")
-}
-)
 
-test_that("meteo-metadata works!", {
   expect_error(meteo_metadata_imgw("hourly", "precip"))
-  # expect_equal(dim(m_hs[[1]]), c(107, 3))
-  # expect_equal(dim(m_hc[[1]]), c(22, 3))
-  # expect_equal(dim(m_ds[[1]]), c(65, 3))
-  # expect_equal(dim(m_ds[[2]]), c(23, 3))
-  # expect_equal(dim(m_dc[[1]]), c(18, 3))
-  # expect_equal(dim(m_dc[[2]]), c(13, 3))
-  # expect_equal(dim(m_dp[[1]]), c(16, 3))
-  # expect_equal(dim(m_ms[[1]]), c(60, 3))
-  # expect_equal(dim(m_ms[[2]]), c(22, 3))
-  # expect_equal(dim(m_mc[[1]]), c(27, 3))
-  # expect_equal(dim(m_mc[[2]]), c(12, 3))
-  # expect_equal(dim(m_mp[[1]]), c(14, 3))
+  expect_equal(dim(m_hs[[1]]), c(107, 3))
+  expect_equal(dim(m_hc[[1]]), c(22, 3))
+  expect_equal(dim(m_ds[[1]]), c(65, 3))
+  expect_equal(dim(m_ds[[2]]), c(23, 3))
+  expect_equal(dim(m_dc[[1]]), c(18, 3))
+  expect_equal(dim(m_dc[[2]]), c(13, 3))
+  expect_equal(dim(m_dp[[1]]), c(16, 3))
+  expect_equal(dim(m_ms[[1]]), c(60, 3))
+  expect_equal(dim(m_ms[[2]]), c(22, 3))
+  expect_equal(dim(m_mc[[1]]), c(27, 3))
+  expect_equal(dim(m_mc[[2]]), c(12, 3))
+  expect_equal(dim(m_mp[[1]]), c(14, 3))
 })
diff --git a/tests/testthat/test-meteo_ogimet.R b/tests/testthat/test-meteo_ogimet.R
index f8521846..9eab933a 100644
--- a/tests/testthat/test-meteo_ogimet.R
+++ b/tests/testthat/test-meteo_ogimet.R
@@ -41,12 +41,21 @@ test_that("meteo_ogimet works!", {
 
   # only wind measurement are present:
-  # testthat::expect_error(
-  #   meteo_ogimet(
-  #     date = c(as.Date("2020-02-01"), Sys.Date() - 1),
-  #     # date = c(Sys.Date() - 7, Sys.Date() - 1),
-  #     interval = "daily",
-  #     coords = FALSE,
-  #     station = "06683", allow_failure = FALSE)
-  #   )
+ testthat::expect_error( + meteo_ogimet( + date = c(as.Date("2020-02-01"), Sys.Date() - 1), + # date = c(Sys.Date() - 7, Sys.Date() - 1), + interval = "daily", + coords = FALSE, + station = "06683", allow_failure = FALSE) + ) + + # check change between years: + multiyr = ogimet_daily(date = c( as.Date("2022-11-15"), + as.Date("2023-02-01")), + station = 12330) + if (is.data.frame(multiyr)) { + testthat::expect_true(nrow(multiyr) > 70) + } + }) diff --git a/vignettes/articles/usecase_outdated.txt b/vignettes/articles/usecase_outdated.txt deleted file mode 100644 index db22b3ef..00000000 --- a/vignettes/articles/usecase_outdated.txt +++ /dev/null @@ -1,155 +0,0 @@ ---- -title: "Use Case - IMGW database + Rawinsonde data [PL]" -output: html_document ---- - -```{r setup, include=FALSE} -knitr::opts_chunk$set(echo = TRUE) -``` - -## IMGW-PIB - dane meteo (example no. 1) - -1. Pobieranie danych miesięcznych ze stacji meteo (SYNOP) dla Poznania w latach 1966-2018. Należy zwrócić uwagę, że w bazie IMGW-PIB stacja występuje jako "POZNAŃ" i "POZNAŃ-ŁAWICA", stąd też konieczne zdefiniowanie obu nazw. -2. Przeliczenie wartości miesięcznych na wartości roczne -3. Wykonanie wykresu zmienności średniej rocznej temperatury oraz obliczenie trendu zmian za pomocą modelu regresji liniowej - -```{r} -library(climate) -library(ggplot2) -library(dplyr) -meteo_poz = meteo_imgw(interval = "monthly", rank = "synop", year = 1966:2019) %>% - filter(station %in% c("POZNAŃ", "POZNAŃ-ŁAWICA")) %>% - select(id, station, yy, mm, t2m_mean_mon) %>% - group_by(yy) %>% - summarise(t2 = mean(t2m_mean_mon)) - -ggplot(meteo_poz, aes(yy, t2)) + - geom_line() + - geom_smooth(method = "lm", se = FALSE) + - labs(title = "Poznań", - subtitle = "Annual mean air temperature (1966 - 2019)", - x = "Year", - y = "Temperature [*C]") + - ggpubr::stat_regline_equation() - -# or as an alternative: -plot( - meteo_poz$yy, - meteo_poz$t2, - type = "l", - xlab = "Year", - ylab = "Temperature [*C]", - main = "Poznań - annual mean air temperature (1966 - 2019)" - ) -model = lm(t2 ~ yy, data = meteo_poz) -abline(model, col = "red", lwd = 3) -legend("topleft", - legend = paste0("y = ", round(coef(model)[2], 3), "x ", - round(coef(model)[1], 1)), bty = "n") -``` - -## IMGW-PIB - dane meteo (example no. 2) - -1. Pobranie danych meteorologicznych dla wszystkich stacji synoptycznych za okres 1966-2017. -2. Przetworzenie pobranych danych na serie roczne -3. Przekształcenie współrzędnych geograficznych (EPSG 4326) na siatkę PUWG-1992 (EPSG 2180); Rozmiar zdefiniowanej domeny obliczeniowej 50 x 50 punktów -4. Transformacja wartości punktowych na rozkład przestrzenny za pomocą interpolacji IDW -5. 
Obliczenie trendu dla serii rocznej w punktach gridowych - mapa współczynnika kierunkowego trendu w Polsce w latach 1966-2017 - -```{r} -library(climate) -library(dplyr) -library(sf) -library(gstat) -library(tmap) -library(raster) -library(rnaturalearth) - -# downloads monthly data and calculates annual mean air temperature -meteo_mon = meteo_imgw(interval = "monthly", rank = "synop", status = F, - year = 1966:2019, coords = TRUE) %>% - dplyr::select(id, station, X, Y, yy, mm, t2m_mean_mon) %>% - dplyr::filter(id != 352180345) %>% # stacja w Kole ma kilka zer zamiast NA - dplyr::filter(id != 349190650 | id != 349190510) %>% # nie bierzemy obserwatorium na Kasprowym i Sniezce - group_by(yy, id, station, X, Y) %>% - summarise(t2 = mean(t2m_mean_mon)) - -# creates spatial object -meteo_mon_sp = meteo_mon %>% - filter(!is.na(X)) %>% - st_as_sf(coords = c("X", "Y"), crs = 4326) - -# plots spatial data -europe = ne_countries(scale = "medium", continent = "Europe", returnclass = "sf") -tm_shape(europe) + - tm_borders(col = "black", lwd = 2) + - tm_shape(meteo_mon_sp) + - tm_symbols(col = "t2", title.col = "", style = "kmeans") + - tm_facets(by = "yy") + - tm_layout(main.title = "Annual mean air temperature in Poland") - -# prepares spatial data in a projected crs -meteo_mon_sp = st_transform(meteo_mon_sp, 2180) -poland = filter(europe, iso_a3 == "POL") %>% - st_transform(2180) - -# creates an empty grid -meteo_grid = raster(poland, ncol = 50, nrow = 50) - -# splits data by year, creates idw models, and make interpolations -meteo_inter = meteo_mon_sp %>% - group_split(yy) %>% - purrr::map(~gstat(formula = t2~1, data = ., #idw - nmax = 10, set = list(idp = 0.1))) %>% - purrr::map(~interpolate(meteo_grid, .)) %>% - stack() - -# calculates a trend for each pixel -years = 1966:2019 -meteo_trend = calc(meteo_inter, fun = function(x) {summary(lm(x ~ years))$coefficients[2]}) %>% - mask(poland) - -# plots a trend map -tm_shape(meteo_trend) + - tm_raster(style = "cont", title = "", breaks = seq(0, 0.04, 0.01)) + - tm_shape(europe) + - tm_borders(col = "black", lwd = 2) + - tm_layout(legend.outside = TRUE, - main.title = "Annual mean air temperature in Poland - -annual trend") -``` - - -## IMGW-PIB - dane hydrologicze (example no. 2) -TODO - - -## DANE RADIOSONDAŻOWE - meteo_sounding() - - -### Pobieranie danych i wykres roboczy dla inwersji - - Pobieranie danych dla stacji radiosondażowej w Legionowie z dn. 16/02/2019: - -```{r} - sounding <- meteo_sounding(wmo_id = 12374, yy = 2019, mm = 2, dd = 16, hh = 0) - head(sounding) - plot(sounding[[1]]$HGHT, sounding[[1]]$PRES, type = 'l') - - # lub cos bardziej uzytecznego: - plot(sounding[[1]]$TEMP, sounding[[1]]$HGHT, col = 'blue', type = 'l', lwd=2 , xlab = 'Temperatura', ylab = 'm (n.p.m.)', - ylim=c(0,4000), xlim=c(-16, 6), main = "Inwersja w Legionowie") - lines(sounding[[1]]$DWPT, sounding[[1]]$HGHT, col = 'red') - grid() -``` - - -### Pobieranie danych dla dłuższego przedziału czasu - - Pobieranie danych dla stacji w Łebie dla zdefiniowanego przedziału czas. - W poniższym przykładzie pobierane są tylko dane radiosondażowe z godz. 00 UTC: - -```{r} - daty <- seq.Date(as.Date("2015-02-27"), as.Date("2015-03-02"), by="day") - daty <- strsplit(as.character(daty), "-") - sounding <- lapply(daty, function(x) meteo_sounding(wmo_id = 12120, yy = x[1], mm = x[2], dd = x[3], hh = "00")) -```
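
A note on the `ogimet_daily` change in this patch: Ogimet returns daily rows with month/day dates only, so a 32-day request that crosses New Year has to attach the previous year to the December rows. Below is a minimal standalone sketch of that assignment logic; the input vector and `year` value are illustrative, not real Ogimet output.

```r
# toy input: "mm/dd" strings as served by Ogimet, spanning a year boundary
dates_mmdd = c("01/02", "01/01", "12/31", "12/30")  # illustrative, not downloaded data
year = 2023                                          # year of the requested period's end

# month part of every row
mth = unlist(lapply(strsplit(dates_mmdd, "/"), "[[", 1))

if (all(c("01", "12") %in% unique(mth))) {
  # December and January both present -> December rows belong to the previous year
  yr = ifelse(mth == "01", as.numeric(year), as.numeric(year) - 1)
} else {
  yr = as.numeric(year)
}

as.Date(paste0(dates_mmdd, "/", yr), format = "%m/%d/%Y")
# expected: "2023-01-02" "2023-01-01" "2022-12-31" "2022-12-30"
```

Because each chunk requested from Ogimet covers at most 32 days (`ndays=32`), no more than two calendar years can appear in a single chunk, so the single year / year-1 split above is sufficient.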