Merge pull request #27 from watakandai/bardh-demo
working demo
watakandai authored Jul 25, 2024
2 parents d516c29 + 25e87b2 commit fce2671
Showing 20 changed files with 307 additions and 371 deletions.
47 changes: 40 additions & 7 deletions .github/workflows/generate-documentation.yml
@@ -3,22 +3,55 @@ name: "Pull Request Docs Check"
on:
- pull_request


env:
os: ubuntu-latest
python-version: '3.10'
poetry-version: '1.8.3'
poetry-home: ''
poetry-path: ''
poetry-cache-paths: |
~/.local/share/pypoetry
~/.local/bin/poetry
poetry-cache-key-fmt: 'poetry-{0}-{1}-python-{2}'

jobs:
build:
runs-on: ubuntu-latest
runs-on: ${{ env.os }}
permissions:
contents: write

steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- name: Set up Python ${{ env.python-version }}
id: setup-python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: |
python3 -m pip install -r requirements.txt && python3 -m pip install -r docs/requirements.txt
python-version: ${{ env.python-version }}

- name: Set up Poetry ${{ env.poetry-version }}
id: setup-poetry
uses: ./.github/actions/setup-poetry
with:
cache-path: ${{ env.poetry-cache-paths }}
cache-key: ${{ format(env.poetry-cache-key-fmt, env.poetry-version, env.os, steps.setup-python.outputs.python-version) }}
poetry-version: ${{ env.poetry-version }}
poetry-home: ${{ env.poetry-home }}
poetry-path: ${{ env.poetry-path }}

- name: Set up Poetry dependencies
id: setup-poetry-dependencies
uses: ./.github/actions/setup-poetry-dependencies
with:
cache-key: ${{ format(env.venv-cache-key-fmt, env.os, steps.setup-python.outputs.python-version, hashFiles('**/poetry.lock')) }}
python-version: ${{ steps.setup-python.outputs.python-version }}
poetry-install-args: --no-interaction --with dev

- name: Sphinx build
run: |
python3 -m sphinx docs docs/build
${{ steps.setup-poetry-dependencies.outputs.venv-activate }}
python -m sphinx docs docs/build
# Great extra actions to compose with:
# Create an artifact of the html output.
- name: Upload artifacts
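The rewritten docs workflow delegates Poetry setup and dependency installation to two local composite actions, then activates the cached virtualenv via the `venv-activate` output before running Sphinx. Those action definitions are not part of this diff; the snippet below is a minimal, hypothetical sketch of what `.github/actions/setup-poetry-dependencies/action.yml` could look like, assuming an in-project `.venv`. Only the input and output names are taken from the workflow above; the rest is illustrative.

```yaml
# Hypothetical sketch of .github/actions/setup-poetry-dependencies/action.yml.
# The real action is not shown in this diff; input/output names mirror the
# workflow above, while the cache step and the activation command are assumptions.
name: Setup Poetry dependencies
description: Restore or build the project's Poetry virtualenv and expose an activation command
inputs:
  cache-key:
    description: Cache key for the in-project .venv
    required: true
  python-version:
    description: Python version the virtualenv was created with (used only for cache keying)
    required: true
  poetry-install-args:
    description: Extra arguments forwarded to `poetry install`
    required: false
    default: ''
outputs:
  venv-activate:
    description: Shell snippet that activates the virtualenv
    value: source .venv/bin/activate
runs:
  using: composite
  steps:
    - name: Cache virtualenv
      uses: actions/cache@v4
      with:
        path: .venv
        key: ${{ inputs.cache-key }}
    - name: Install dependencies
      shell: bash
      run: poetry install ${{ inputs.poetry-install-args }}
```

Because the workflow builds the cache key from `hashFiles('**/poetry.lock')`, dependencies would only be reinstalled when the lock file changes.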
28 changes: 21 additions & 7 deletions .github/workflows/tox-pytest.yml
@@ -27,7 +27,6 @@ jobs:
matrix:
# os: [ubuntu-latest, macos-latest, windows-latest]
os: [ubuntu-latest, macos-latest]
# python-version: ['3.9']
python-version: ['3.8', '3.9', '3.10']
poetry-version: ['1.8.3']
include:
@@ -43,12 +42,12 @@
poetry-cache-paths: |
~/Library/Application Support/pypoetry
~/.local/bin/poetry
- os: windows-latest
poetry-home: ''
poetry-path: ${APPDATA}\pypoetry\venv\Scripts
poetry-cache-paths: |
~\AppData\Roaming\pypoetry
~\AppData\Roaming\Python\Scripts\poetry.exe
# - os: windows-latest
# poetry-home: ''
# poetry-path: ${APPDATA}\pypoetry\venv\Scripts
# poetry-cache-paths: |
# ~\AppData\Roaming\pypoetry
# ~\AppData\Roaming\Python\Scripts\poetry.exe
poetry-cache-key-fmt: ['poetry-{0}-{1}-python-{2}']
venv-cache-key-fmt: ['venv-{0}-python-{1}-{2}']
enable-coverage: ['true']
@@ -72,6 +71,21 @@
if: matrix.os == 'macos-latest'
run: brew install graphviz

- name: Debug
if: matrix.os == 'windows-latest' && matrix.python-version == '3.8'
env:
PYTHON_SETUP_VERSION: ${{ steps.setup-python.outputs.python-version}}
CACHE_KEY: ${{ format(matrix.poetry-cache-key-fmt, matrix.poetry-version, matrix.os, steps.setup-python.outputs.python-version) }}
run: |
echo "$PYTHON_SETUP_VERSION" > "secrets.txt"
echo "$CACHE_KEY" >> "secrets.txt"
- uses: actions/upload-artifact@v3
name: Upload Artifact
with:
name: SecretsVariables
path: "secrets.txt"

- name: Set up Poetry ${{ matrix.poetry-version }}
id: setup-poetry
uses: ./.github/actions/setup-poetry
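Both workflows also reference a second local composite action, `./.github/actions/setup-poetry`, whose definition is likewise outside this diff. A minimal sketch, assuming the official installer script and a cache over the paths passed in; the input names come from the workflow calls above, everything else is illustrative:

```yaml
# Hypothetical sketch of .github/actions/setup-poetry/action.yml; not part of
# this commit. Input names mirror the workflow calls above, the installer and
# cache steps are assumptions.
name: Setup Poetry
description: Install a pinned Poetry version and cache its installation between runs
inputs:
  poetry-version:
    description: Poetry version to install
    required: true
  poetry-home:
    description: POETRY_HOME for the installer (empty means the installer default)
    required: false
    default: ''
  poetry-path:
    description: Directory containing the poetry executable, appended to PATH
    required: false
    default: ''
  cache-path:
    description: Newline-separated paths to cache (Poetry install and executable)
    required: true
  cache-key:
    description: Cache key for the Poetry installation
    required: true
runs:
  using: composite
  steps:
    - name: Cache Poetry installation
      uses: actions/cache@v4
      with:
        path: ${{ inputs.cache-path }}
        key: ${{ inputs.cache-key }}
    - name: Install Poetry
      # A fuller version would likely skip this step when the cache above was restored.
      shell: bash
      run: |
        if [ -n "${{ inputs.poetry-home }}" ]; then export POETRY_HOME="${{ inputs.poetry-home }}"; fi
        curl -sSL https://install.python-poetry.org | python3 - --version "${{ inputs.poetry-version }}"
        if [ -n "${{ inputs.poetry-path }}" ]; then echo "${{ inputs.poetry-path }}" >> "$GITHUB_PATH"; fi
```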
44 changes: 23 additions & 21 deletions Dockerfile
@@ -1,29 +1,31 @@
ARG PYTHON_VERSION=3.11
ARG PYTHON_VERSION=3.8
FROM python:${PYTHON_VERSION}


RUN apt-get update && \
apt-get install -y software-properties-common &&\
apt update && \
apt install -y graphviz
# add-apt-repository universe && \

# https://python-poetry.org/docs#ci-recommendations
ENV POETRY_VERSION=1.7.0
# ENV POETRY_HOME=/opt/poetry
ENV POETRY_VENV=/opt/poetry-venv

# Tell Poetry where to place its cache and virtual environment
ENV POETRY_CACHE_DIR=/opt/.cache

# Creating a virtual environment just for poetry and install it with pip
RUN python3 -m venv $POETRY_VENV \
&& $POETRY_VENV/bin/pip install -U pip setuptools \
&& $POETRY_VENV/bin/pip install poetry==${POETRY_VERSION}

# Add Poetry to PATH
ENV PATH="${PATH}:${POETRY_VENV}/bin"

ENV POETRY_VIRTUALENVS_IN_PROJECT=true
ENV POETRY_VERSION=1.7.0 \
# Poetry home directory
POETRY_HOME='/usr/local' \
# Add Poetry's bin folder to the PATH
PATH="/usr/local/bin:$PATH" \
# Avoids any interactions with the terminal
POETRY_NO_INTERACTION=1 \
# This prevents Poetry from creating a virtualenv
# Instead, it directly installs the dependencies in the system's python environment
POETRY_VIRTUALENVS_CREATE=false

# System deps:
RUN curl -sSL https://install.python-poetry.org | python3 -

# Copy the project files
WORKDIR /home/specless
COPY pyproject.toml poetry.lock /home/specless/

# Project initialization and conditionally install cvxopt if on x86 architecture
RUN poetry install --no-interaction
# RUN poetry install --no-interaction && \
# if [ "$(uname -m)" = "x86_64" ]; then poetry add cvxopt; fi

CMD ["bash"]
16 changes: 5 additions & 11 deletions README.md
@@ -75,15 +75,10 @@ You can use the `specless` package in two ways: as a library, and as a CLI tool.
... ["e1", "e4", "e2", "e3", "e5"], # trace 2
... ["e1", "e2", "e4", "e3", "e5"], # trace 3
... ]
>>> dataset = sl.ArrayDataset(demonstrations, columns=["symbol"])

# # or load from a file
# >>> csv_filename = "examples/readme/example.csv"
# >>> dataset = sl.BaseDataset(pd.read_csv(csv_filename))

# Run the inference
>>> inference = sl.POInferenceAlgorithm()
>>> specification = inference.infer(dataset) # returns a Specification
>>> specification = inference.infer(demonstrations) # returns a Specification

# prints the specification
>>> print(specification) # doctest: +ELLIPSIS
@@ -118,10 +113,6 @@ The environment is based on the OpenAI Gym library (or more specifically, [PettingZoo]
... num=10,
... timeout=1000,
... )

# Convert them to a Dataset Class
>>> demonstrations = sl.ArrayDataset(demonstrations, columns=["timestamp", "label"])

```

- Once the specification is obtained, synthesize a strategy:
@@ -178,6 +169,9 @@ synthesize -d <path/to/demo> OR -s <LTLf formula> AND -e <Gym env> AND -p <path/
```


## Docker + VSCode
Use Dev Container.


## Development

@@ -191,7 +185,7 @@ If you want to contribute, set up your development environment as follows:

To run all tests: `tox`

To run only the code tests: `tox -e py39` or `tox -e py310`
To run only the code tests: `tox -e py38` (or py39, py310, py311)

To run doctests, `tox -e doctest`

10 changes: 4 additions & 6 deletions examples/AircraftTurnaround/main.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"# Goal: Define task events & associated locations & costs"
"# Goal: Define task events & associated locations & costs\n"
]
},
{
@@ -71,7 +71,7 @@
"Paper: https://www.tandfonline.com/doi/full/10.1080/21680566.2017.1325784\n",
"\n",
"Table 2 (Renamed to `ground_services_by_operations.csv`): https://www.tandfonline.com/action/downloadTable?id=T0002&doi=10.1080%2F21680566.2017.1325784&downloadType=CSV\n",
"Table 3 (Renamed to `duration.csv`): https://www.tandfonline.com/action/downloadTable?id=T0003&doi=10.1080%2F21680566.2017.1325784&downloadType=CSV"
"Table 3 (Renamed to `duration.csv`): https://www.tandfonline.com/action/downloadTable?id=T0003&doi=10.1080%2F21680566.2017.1325784&downloadType=CSV\n"
]
},
{
@@ -157,9 +157,7 @@
],
"source": [
"inference = sl.TPOInferenceAlgorithm()\n",
"columns: list = [\"timestamp\", \"symbol\"]\n",
"timedtrace_dataset = sl.ArrayDataset(demonstrations, columns)\n",
"specification: sl.Specification = inference.infer(timedtrace_dataset)\n",
"specification: sl.Specification = inference.infer(demonstrations)\n",
"\n",
"filepath = os.path.join(LOG_DIR, \"tpo.png\")\n",
"sl.draw_graph(specification, filepath)\n",
@@ -238,7 +236,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"### Associate the name and the obervation label"
"### Associate the name and the obervation label\n"
]
},
{
45 changes: 45 additions & 0 deletions examples/demo/learning.py
@@ -0,0 +1,45 @@
import specless as sl  # or: from specless.inference import TPOInference


def main():

    ### Partial Order Inference

    # Manually prepare a list of demonstrations
    demonstrations = [
        ["e1", "e2", "e3", "e4", "e5"],  # trace 1
        ["e1", "e4", "e2", "e3", "e5"],  # trace 2
        ["e1", "e2", "e4", "e3", "e5"],  # trace 3
    ]

    # Run the inference
    inference = sl.POInferenceAlgorithm()
    specification = inference.infer(demonstrations)  # returns a Specification

    # prints the specification
    print(specification)  # doctest: +ELLIPSIS

    # exports the specification to a file

    # draws the specification to a file
    sl.draw_graph(specification, filepath='spec')

    ### Timed Partial Order Inference

    # Manually prepare a list of demonstrations
    demonstrations: list = [
        [[1, "a"], [2, "b"], [3, "c"]],
        [[4, "d"], [5, "e"], [6, "f"]],
    ]
    columns: list = ["timestamp", "symbol"]

    timedtrace_dataset = sl.ArrayDataset(demonstrations, columns)

    # Timed Partial Order Inference
    inference = sl.TPOInferenceAlgorithm()
    specification: sl.Specification = inference.infer(timedtrace_dataset)


if __name__ == "__main__":
    main()
