From 855cc862aaf64f05952b47efcf18dcc6a728daf0 Mon Sep 17 00:00:00 2001
From: Stephen Oman
Date: Fri, 5 Jul 2024 20:28:50 +0100
Subject: [PATCH] v1.0.0-rc1 preparation (#22)

* Add workflow job to publish to testpypi
* Only build wheels when core tests are passing
* Change version number to match Python version spec
* Configure build steps for prereleased activity
* Move Getting Started section higher in README
* Remove the rcXX suffix from the cmake version as it doesn't like the Python versioning format
* Bump cibuildwheel version
---
 .github/workflows/test_and_deploy.yml | 51 +++++++++++++++++--
 CMakeLists.txt | 6 ++-
 README.md | 71 ++++++++++++++-------------
 version.txt | 2 +-
 4 files changed, 87 insertions(+), 43 deletions(-)

diff --git a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml
index 92bd4cbf3..9e6dcc5be 100644
--- a/.github/workflows/test_and_deploy.yml
+++ b/.github/workflows/test_and_deploy.yml
@@ -5,7 +5,7 @@ on:
     branches: [main]
   pull_request:
   release:
-    types: [released]
+    types: [prereleased, released]
 jobs:
   test_repo:
     name: Test on ${{ matrix.os }} w/ Py${{ matrix.python-version }}
@@ -114,6 +114,7 @@ jobs:
         path: dist/
   build_wheels:
     name: Build wheels on Ubuntu
+    needs: test_repo
     runs-on: ubuntu-20.04 # Can be also run for macOS
     steps:
       - uses: actions/checkout@v3
@@ -125,8 +126,8 @@ jobs:
         with:
           platforms: all
       - name: Build wheels
-        if: github.event_name != 'release' && github.event.action != 'released'
-        uses: pypa/cibuildwheel@v2.15.0 # The main configuration is in pyproject.toml
+        if: github.event_name != 'release'
+        uses: pypa/cibuildwheel@v2.19.2 # The main configuration is in pyproject.toml
         env:
           CIBW_BUILD: "cp311-manylinux*" # Build only python 3.11 wheels for testing
           # Increase verbosity to see what's going on in the build in case of failure
@@ -134,8 +135,8 @@ jobs:
           CIBW_REPAIR_WHEEL_COMMAND_LINUX: >
             auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}
       - name: Build release wheels
-        if: github.event_name == 'release' && github.event.action == 'released'
-        uses: pypa/cibuildwheel@v2.15.0 # The main configuration is in pyproject.toml
+        if: github.event_name == 'release' && (github.event.action == 'released' || github.event.action == 'prereleased')
+        uses: pypa/cibuildwheel@v2.19.2 # The main configuration is in pyproject.toml
         env:
           # Set NLE_RELEASE_BUILD to 1 to build release wheels
           CIBW_ENVIRONMENT: "NLE_RELEASE_BUILD=1"
@@ -175,6 +176,46 @@ jobs:
         python -m pytest --import-mode=append -svx $REPONAME/nle/tests
         popd

+  # Use prereleases to test publishing the artefacts to TestPyPI
+  test_deploy:
+    name: Deploy artefacts to testpypi
+    needs: [test_sdist_3_11, test_manylinux_3_11]
+    if: github.event_name == 'release' && github.event.action == 'prereleased'
+    runs-on: ubuntu-latest
+    environment:
+      name: prerelease
+      url: https://test.pypi.org/p/nle
+    permissions:
+      id-token: write
+    steps:
+      - uses: actions/checkout@v4
+      - name: Check version matches release tag
+        run: |
+          echo "v$(cat version.txt)"
+          echo "${{ github.event.release.tag_name }}"
+          [[ "${{ github.event.release.tag_name }}" == "v$(cat version.txt)" ]]
+      - name: Get sdist artifact # Get sdist artifact from the test_sdist job
+        uses: actions/download-artifact@v3
+        with:
+          name: python-sdist
+          path: dist
+      - name: Get wheels artifacts # Get wheels artifacts from the build_wheels job
+        uses: actions/download-artifact@v3
+        with:
+          name: python-wheels
+          path: dist
+      - name: Report dist contents
+        run: |
+          pwd
+          ls -R
+          ls -al .
+          ls -R dist/
+          ls -al dist/
+      - name: Publish package to TestPyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          repository-url: https://test.pypi.org/legacy/
+
   # TODO move this to separate workflow whenever github decides to provide basic
   # functionalities like workflow dependencies :|
   deploy_sdist:
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 38e4894ca..e2e28d390 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,6 +1,8 @@
 cmake_minimum_required(VERSION 3.15)
 file(STRINGS "version.txt" NLE_VERSION)
-project(nle VERSION ${NLE_VERSION})
+# Remove any rcXX suffix from the version number as CMake doesn't like it
+string(REGEX REPLACE "rc[0-9]+$" "" CMAKE_NLE_VERSION ${NLE_VERSION})
+project(nle VERSION ${CMAKE_NLE_VERSION})
 
 if(CMAKE_BUILD_TYPE MATCHES Debug)
   message("Debug build.")
@@ -30,7 +32,7 @@ else()
   message("Some other build type.")
 endif()
 
-message(STATUS "Building nle backend version: ${NLE_VERSION}")
+message(STATUS "Building nle backend version: ${CMAKE_NLE_VERSION}")
 
 set(CMAKE_POSITION_INDEPENDENT_CODE ON)
diff --git a/README.md b/README.md
index 5621e38d7..e675e8a93 100644
--- a/README.md
+++ b/README.md
@@ -28,41 +28,6 @@ README](./README.nh), at [nethack.org](https://nethack.org/), and on the
 This version of NLE uses the [Farama Organisation Gymnasium Environment](https://gymnasium.farama.org) APIs.
 
-### NLE Language Wrapper
-
-We thank [ngoodger](https://github.com/ngoodger) for implementing the [NLE Language Wrapper](https://github.com/ngoodger/nle-language-wrapper) that translates the non-language observations from NetHack tasks into similar language representations. Actions can also be optionally provided in text form which are converted to the Discrete actions of the NLE.
-
-### NetHack Learning Dataset
-
-The NetHack Learning Dataset (NLD) code now ships with `NLE`, allowing users to the load large-scale datasets featured in [Dungeons and Data: A Large-Scale NetHack Dataset](), while also generating and loading their own datasets.
-
-```python
-import nle.dataset as nld
-
-if not nld.db.exists():
-    nld.db.create()
-    # NB: Different methods are used for data based on NLE and data from NAO.
-    nld.add_nledata_directory("/path/to/nld-aa", "nld-aa-v0")
-    nld.add_altorg_directory("/path/to/nld-nao", "nld-nao-v0")
-
-dataset = nld.TtyrecDataset("nld-aa-v0", batch_size=128, ...)
-for i, mb in enumerate(dataset):
-    foo(mb) # etc...
-```
-
-For information on how to download NLD-AA and NLD-NAO, see the dataset doc [here](./DATASET.md).
-
-Otherwise checkout the tutorial Colab notebook [here](https://colab.research.google.com/drive/1GRP15SbOEDjbyhJGMDDb2rXAptRQztUD?usp=sharing).
-
-# Papers using the NetHack Learning Environment
-- Izumiya and Simo-Serra [Inventory Management with Attention-Based Meta Actions](https://esslab.jp/~ess/publications/IzumiyaCOG2021.pdf) (Waseda University, CoG 2021).
-- Samvelyan et al. [MiniHack the Planet: A Sandbox for Open-Ended Reinforcement Learning Research](https://arxiv.org/abs/2109.13202) (FAIR, UCL, Oxford, NeurIPS 2021).
-- Zhang et al. [BeBold: Exploration Beyond the Boundary of Explored Regions](https://arxiv.org/abs/2012.08621) (Berkley, FAIR, Dec 2020).
-- Küttler et al. [The NetHack Learning Environment](https://arxiv.org/abs/2006.13760) (FAIR, Oxford, NYU, Imperial, UCL, NeurIPS 2020).
-
-Open a [pull
-request](https://github.com/heiner/nle/edit/main/README.md)
-to add papers.
 # Getting started
 
@@ -200,6 +165,42 @@ $ python -m nle.scripts.plot
                                        steps
 ```
 
+### NLE Language Wrapper
+
+We thank [ngoodger](https://github.com/ngoodger) for implementing the [NLE Language Wrapper](https://github.com/ngoodger/nle-language-wrapper) that translates the non-language observations from NetHack tasks into similar language representations. Actions can also optionally be provided in text form and are converted to the Discrete actions of the NLE.
+
+### NetHack Learning Dataset
+
+The NetHack Learning Dataset (NLD) code now ships with `NLE`, allowing users to load the large-scale datasets featured in [Dungeons and Data: A Large-Scale NetHack Dataset](), while also generating and loading their own datasets.
+
+```python
+import nle.dataset as nld
+
+if not nld.db.exists():
+    nld.db.create()
+    # NB: Different methods are used for data based on NLE and data from NAO.
+    nld.add_nledata_directory("/path/to/nld-aa", "nld-aa-v0")
+    nld.add_altorg_directory("/path/to/nld-nao", "nld-nao-v0")
+
+dataset = nld.TtyrecDataset("nld-aa-v0", batch_size=128, ...)
+for i, mb in enumerate(dataset):
+    foo(mb) # etc...
+```
+
+For information on how to download NLD-AA and NLD-NAO, see the dataset doc [here](./DATASET.md).
+
+Otherwise, check out the tutorial Colab notebook [here](https://colab.research.google.com/drive/1GRP15SbOEDjbyhJGMDDb2rXAptRQztUD?usp=sharing).
+
+# Papers using the NetHack Learning Environment
+- Izumiya and Simo-Serra [Inventory Management with Attention-Based Meta Actions](https://esslab.jp/~ess/publications/IzumiyaCOG2021.pdf) (Waseda University, CoG 2021).
+- Samvelyan et al. [MiniHack the Planet: A Sandbox for Open-Ended Reinforcement Learning Research](https://arxiv.org/abs/2109.13202) (FAIR, UCL, Oxford, NeurIPS 2021).
+- Zhang et al. [BeBold: Exploration Beyond the Boundary of Explored Regions](https://arxiv.org/abs/2012.08621) (Berkeley, FAIR, Dec 2020).
+- Küttler et al. [The NetHack Learning Environment](https://arxiv.org/abs/2006.13760) (FAIR, Oxford, NYU, Imperial, UCL, NeurIPS 2020).
+
+Open a [pull
+request](https://github.com/heiner/nle/edit/main/README.md)
+to add papers.
+
 # Contributing
 
diff --git a/version.txt b/version.txt
index 3eefcb9dd..6a056a8b1 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-1.0.0
+1.0.0rc1