Merge pull request #50 from phobson/more-gh-actions-part2
More GH actions part2
phobson authored Nov 3, 2020
2 parents a811c76 + bb5054b commit 7b1598d
Showing 24 changed files with 192 additions and 59 deletions.
File renamed without changes.
32 changes: 32 additions & 0 deletions .github/workflows/check-test-coverage.yml
@@ -0,0 +1,32 @@
name: Coverage via codecov
on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  run:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - name: Setup Python
      uses: actions/setup-python@v2
      with:
        python-version: 3.8
    - name: Generate coverage report
      run: |
        python -m pip install --upgrade pip
        pip install pytest pytest-cov pytest-mpl coverage docopt
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
        pip install scipy
        export MPL_IMGCOMP_TOLERANCE=35
        coverage run --source cloudside check_cloudside.py --doctest-modules --cov-report=xml
    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v1
      with:
        # directory: ./coverage/reports/
        flags: unittests
        name: codecov-umbrella
        fail_ci_if_error: true
        path_to_write_report: ./codecov_report.gz
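
All of the test workflows in this commit call python check_cloudside.py rather than invoking pytest directly. That script is not part of this diff, so the following is only a hypothetical sketch, assuming check_cloudside.py is a thin runner that forwards its command-line flags (--doctest-modules here, --runslow and --mpl in the workflows further down) on to pytest:

# Hypothetical sketch only; the real check_cloudside.py is not shown in this diff.
# It illustrates a small runner that hands its CLI arguments straight to pytest.
import sys

import pytest


def main():
    # run the cloudside test suite, forwarding any extra flags to pytest
    return pytest.main(["--pyargs", "cloudside"] + sys.argv[1:])


if __name__ == "__main__":
    sys.exit(main())
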
31 changes: 31 additions & 0 deletions .github/workflows/python-publish.yml
@@ -0,0 +1,31 @@
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Upload Python Package to PyPI

on:
  release:
    types: [created]

jobs:
  deploy:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: '3.x'
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install setuptools wheel twine
    - name: Build and publish
      env:
        TWINE_USERNAME: ${{ secrets.PMH_PYPI_USER }}
        TWINE_PASSWORD: ${{ secrets.PMH_PYPI_PASS }}
      run: |
        python setup.py sdist bdist_wheel
        twine upload dist/*
@@ -1,7 +1,7 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Python package
name: Lint with flake8

on:
push:
@@ -13,27 +13,21 @@ jobs:
build:

runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]

steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements_dev.txt ]; then pip install -r requirements_dev.txt; fi
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --exclude .git,docs/*
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
python check_cloudside.py --runslow
33 changes: 33 additions & 0 deletions .github/workflows/python-runtests-basic.yml
@@ -0,0 +1,33 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Run basic unit tests

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.6, 3.7, 3.8]

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install flake8 pytest
        if [ -f requirements_dev.txt ]; then pip install -r requirements_dev.txt; fi
    - name: Test with pytest
      run: |
        python check_cloudside.py --runslow
30 changes: 30 additions & 0 deletions .github/workflows/python-runtests-img-comp.yml
@@ -0,0 +1,30 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Image comparison tests

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:

    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - name: Set up Python 3.8
      uses: actions/setup-python@v2
      with:
        python-version: 3.8
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install pytest pytest-mpl docopt
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
    - name: Test with pytest
      run: |
        export MPL_IMGCOMP_TOLERANCE=37
        python check_cloudside.py --mpl
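
The MPL_IMGCOMP_TOLERANCE variable exported above is presumably how the test suite loosens pytest-mpl's baseline comparison on CI. For illustration only (this is not cloudside's actual test code), a pytest-mpl image-comparison test generally looks like the sketch below: the test returns a Matplotlib figure, and pytest-mpl compares it against a stored baseline image when pytest runs with --mpl:

# Illustration only; not taken from cloudside's test suite.
import os

import matplotlib.pyplot as plt
import pytest

# fall back to pytest-mpl's default RMS tolerance of 2 if the variable is unset
TOLERANCE = float(os.environ.get("MPL_IMGCOMP_TOLERANCE", 2))


@pytest.mark.mpl_image_compare(tolerance=TOLERANCE)
def test_demo_plot():
    fig, ax = plt.subplots()
    ax.plot([0, 1, 2, 3], [0, 1, 4, 9])
    return fig

Without the --mpl flag the decorated test still runs but skips the image comparison, which is why the workflow passes --mpl explicitly.
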
4 changes: 2 additions & 2 deletions README.rst
@@ -3,8 +3,8 @@ cloudside: download, assess, and visualize weather data
.. image:: https://travis-ci.org/Geosyntec/cloudside.svg?branch=master
:target: https://travis-ci.org/Geosyntec/cloudside

.. image:: https://coveralls.io/repos/phobson/cloudside/badge.svg?branch=master&service=github
:target: https://coveralls.io/github/phobson/cloudside?branch=master
.. image:: https://codecov.io/gh/Geosyntec/cloudside/branch/master/graph/badge.svg?token=02qkR2vPrK
:target: https://codecov.io/gh/Geosyntec/cloudside


The problem this is solving
17 changes: 8 additions & 9 deletions cloudside/asos.py
@@ -1,5 +1,4 @@
# std lib stuff
import datetime
import logging
import warnings
from ftplib import FTP, error_perm
@@ -8,7 +7,7 @@

import numpy
import pandas
from metar import Metar, Datatypes
from metar import Metar

from . import validate

@@ -118,7 +117,7 @@ def _fetch_file(
past_attempts=0,
max_attempts=10,
):
""" Fetches a single file from the ASOS ftp and returns its pathh on the
"""Fetches a single file from the ASOS ftp and returns its pathh on the
local file system
Parameters
@@ -192,7 +191,7 @@ def fetch_files(
force_download=False,
pbar_fxn=None,
):
""" Fetches a single file from the ASOS ftp and returns its path on the
"""Fetches a single file from the ASOS ftp and returns its path on the
local file system
Parameters
@@ -234,7 +233,7 @@ def fetch_files(


def _find_reset_time(precip_ts):
""" Determines the precipitation gauge's accumulation reset time.
"""Determines the precipitation gauge's accumulation reset time.
Parameters
----------
@@ -265,7 +264,7 @@ def get_idxmin(g):


def _process_precip(data, rt, raw_precipcol):
""" Processes precip data that accumulates hourly into raw minute
"""Processes precip data that accumulates hourly into raw minute
intensities.
Parameters
@@ -300,7 +299,7 @@ def _process_precip(data, rt, raw_precipcol):


def parse_file(filepath, new_precipcol="precipitation"):
""" Parses a raw ASOS/METAR file into a pandas.DataFrame
"""Parses a raw ASOS/METAR file into a pandas.DataFrame
Parameters
----------
@@ -342,7 +341,7 @@ def get_data(
force_download=False,
pbar_fxn=None,
):
""" Download and process a range of FAA/ASOS data files for a given station
"""Download and process a range of FAA/ASOS data files for a given station
Parameters
----------
@@ -373,7 +372,7 @@ def get_data(
--------
>>> from cloudside import asos
>>> from tqdm import tqdm
>>> pdx = asos.get_data('KPDX', '2010-10-01', '2013-10-31', my_email,
>>> pdx = asos.get_data('KPDX', '2013-09-01', '2013-10-31', '[email protected]',
... folder='Portland_weather', raw_folder='asos_files',
... force_download=False, pbar_fxn=tqdm)
"""
19 changes: 18 additions & 1 deletion cloudside/exporters.py
@@ -239,6 +239,22 @@ def _write_obs(rowheader, year, month, day, obs):


def _obs_from_row(row):
"""
Parses the NCDC format illustrated below:
HPD04511406HPCPHI19480700010040100000000 1300000000M 2400000000M 2500000000I
AAABBBBBBCCCCCCDDEEEEFFGGGGHHHIIJJJJJJJJ IIJJJJJJJJK IIJJJJJJJJK IIJJJJJJJJK
Group A - the Record Type
Group B - the station COOPID
Group C - the element (i.e., parameter; e.g., precip)
Group D - units (HI = hundredths of an inch)
Group E - year
Group F - month
Group G - day
Group H - the number of observations in the row
Group I - hour
Group J - observed value
Group K - qualifier
"""
values = row.strip().split()
header = list(values.pop(0))
recordtype = _pop_many(header, 3)
@@ -249,7 +265,8 @@ def _obs_from_row(row):
month = int(_pop_many(header, 2))
day = int(_pop_many(header, 4))

_count = int(_pop_many(header, 3))
# strip the row's count of observations from the header
int(_pop_many(header, 3))

observations = ["".join(header)]
observations.extend(values)
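
For readers unfamiliar with the NCDC layout documented in the new _obs_from_row docstring above, each trailing group repeats the IIJJJJJJJJK pattern (hour, value, qualifier). A minimal, illustrative sketch (not the library's parser) of splitting those groups out of the docstring's sample row:

# Illustration only, using the sample row from the docstring above.
row = (
    "HPD04511406HPCPHI19480700010040100000000"
    " 1300000000M 2400000000M 2500000000I"
)

header, *obs_groups = row.split()

for group in obs_groups:
    hour = int(group[0:2])     # Group I - hour
    value = int(group[2:10])   # Group J - observed value (here in hundredths of an inch)
    qualifier = group[10:]     # Group K - qualifier flag
    print(hour, value, qualifier)
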
4 changes: 2 additions & 2 deletions cloudside/hydra.py
@@ -15,7 +15,7 @@ def _fetch_file(station_id, raw_folder, force_download=False):


def parse_file(filepath):
""" Parses a rain file downloaded from the Portland Hydra Network
"""Parses a rain file downloaded from the Portland Hydra Network
Parameters
----------
@@ -61,7 +61,7 @@ def parse_file(filepath):


def get_data(station_id, folder=".", raw_folder="01-raw", force_download=False):
""" Download and parse full records from Portland's Hydra Network
"""Download and parse full records from Portland's Hydra Network
Parameters
----------
13 changes: 4 additions & 9 deletions cloudside/ncdc.py
@@ -16,7 +16,7 @@ def date_parser(x):


def remove_bad_rain_values(df, raincol="hpcp", threshold=500):
""" Filters invalid rainfall values and returns a new series.
"""Filters invalid rainfall values and returns a new series.
NCDC uses 99999 1/100th of an inch for invalid/missing values.
We downloaded the data in mm, so NCDC converted that value to
@@ -114,8 +114,6 @@ def setup_station_data(
# sort the now chaotic index
station_data.sort_index(inplace=True)

cooptxt = coopid.replace(":", "")

# generate the full index (every hour, ever day)
fulldates = pandas.date_range(
start=origin_date, end=future_date, freq=pandas.offsets.Hour(1)
@@ -186,7 +184,6 @@ def timediff(row, t2, t1, asdays=False):
)

if summary.shape[0] > 1:
secperhr = 60.0 * 60.0
# compute storm durations
summary["Duration Hours"] = summary.apply(
lambda row: timediff(row, "End Date", "Start Date", asdays=False), axis=1
@@ -236,8 +233,6 @@ def availabilityByStation(
for status, pct in zip(_statuses, _avail_pct.values)
]

cooptxt = coopid.replace(":", "")

# reset in the index and compute year and month-day-hour representations of the date
stationdata = stationdata.reset_index()[["index", "status"]]
stationdata["Yr"] = stationdata["index"].apply(lambda d: d.strftime("%Y"))
@@ -271,7 +266,7 @@ def availabilityByStation(
bounds = [-0.5, 0.5, 1.5, 2.5, 3.5]
norm = colors.BoundaryNorm(bounds, cmap.N)

img = ax.pcolorfast(grid, cmap=cmap, norm=norm)
ax.pcolorfast(grid, cmap=cmap, norm=norm)
ax.set_aspect(grid.shape[1] / grid.shape[0])

ax.set_yticks(numpy.arange(grid.shape[0]) + 0.5)
@@ -328,7 +323,7 @@ def dataAvailabilityHeatmap(data, figsize=None):
mdata = numpy.ma.masked_less(data.values, 1)

ax = fig.add_subplot(gs[1])
img = ax.pcolorfast(mdata, cmap=cmap)
ax.pcolorfast(mdata, cmap=cmap)
ax.set_xlabel("Year")
ax.set_ylabel("Precipitation Gauge")
ax.xaxis.tick_bottom()
@@ -350,7 +345,7 @@
)

cax = fig.add_subplot(gs[0])
cbar = colorbar.ColorbarBase(
colorbar.ColorbarBase(
cax, cmap=cmap, norm=norm, orientation="horizontal", extend="min"
)
cax.invert_xaxis()
2 changes: 0 additions & 2 deletions cloudside/storms.py
@@ -1,5 +1,3 @@
import warnings

import numpy
import pandas

1 change: 0 additions & 1 deletion cloudside/tests/__init__.py
@@ -2,7 +2,6 @@
from functools import wraps
from contextlib import contextmanager

import cloudside

try:
import pytest
1 change: 0 additions & 1 deletion cloudside/tests/test_asos.py
@@ -1,4 +1,3 @@
from datetime import datetime
import pathlib
import tempfile
import ftplib