Skip to content

Commit

Permalink
Merge pull request #139 from taichi-dev/ci/auto-publish-release
Browse files Browse the repository at this point in the history
[ci] Ci/auto publish release
  • Loading branch information
frostming committed Mar 9, 2022
2 parents 1ad43e6 + fa83198 commit 0c17174
Show file tree
Hide file tree
Showing 108 changed files with 2,838 additions and 1,021 deletions.
97 changes: 77 additions & 20 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
@@ -1,40 +1,36 @@
name: Publishing Release
on:
release:
# https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows#release
types: [published]
# When triggered by schedule and workflow_dispatch, github.event.action is an empty string.
# We use this to distinguish which taichi to release.
schedule:
- cron: "0 0 * * *"
workflow_dispatch:
# Manually trigger the release workflow, a version must be provided
inputs:
version:
description: "The version to release (e.g. v0.8.0)"
type: string
required: true

env:
PROD_PWD: ${{ secrets.PYPI_PWD_PROD }}
NIGHT_PWD: ${{ secrets.PYPI_PWD_NIGHTLY }}
METADATA_USERNAME: ${{ secrets.METADATA_USERNAME }}
METADATA_PASSWORD: ${{ secrets.METADATA_PASSWORD }}
METADATA_URL: ${{ secrets.METADATA_URL }}
RELEASE_VERSION: ${{ github.event.inputs.version }}

jobs:
add_version_to_database:
name: Add version to database
# Skip running release workflow on forks
if: github.repository_owner == 'taichi-dev'
if: github.repository == 'taichi-dev/taichi' && github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

- name: set tag
id: tag
run: echo ::set-output name=version::${GITHUB_REF#refs/*/}

- name: Save new version
run: |
python3 -m pip install requests==2.26
[ -z "${{ github.event.action }}" ] || python3 misc/save_new_version.py
env:
RELEASE_VERSION: ${{ steps.tag.outputs.version }}
python3 misc/save_new_version.py
# This job set environment matrix with respect to production release and nightly release.
matrix_prep:
Expand All @@ -46,17 +42,17 @@ jobs:
steps:
- id: set-matrix
run: |
if [ -z "${{ github.event.action }}" ]; then
# For nightly release, we only run on python 3.8
echo '::set-output name=matrix::{"include":[{"name":"taichi-nightly","python":"3.8","conda_python":"py38"}]}"'
# M1 only supports py38 and py39(conda), so change matrix.
echo '::set-output name=matrix_osx::{"include":[{"name":"taichi-nightly","python":"3.8"}]}"'
else
if [[ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]]; then
# For production release, we run on four python versions.
echo '::set-output name=matrix::{"include":[{"name":"taichi","python":"3.6","conda_python":"py36"},{"name":"taichi","python":"3.7","conda_python":"py37"},{"name":"taichi","python":"3.8","conda_python":"py38"},{"name":"taichi","python":"3.9","conda_python":"py39"}]}"'
echo '::set-output name=matrix_osx::{"include":[{"name":"taichi","python":"3.8"},{"name":"taichi","python":"3.9"}]}"'
else
# For nightly release, we only run on python 3.8
echo '::set-output name=matrix::{"include":[{"name":"taichi-nightly","python":"3.8","conda_python":"py38"},{"name":"taichi-nightly","python":"3.10","conda_python":"py310"}]}"'
# M1 only supports py38 and py39(conda), so change matrix.
echo '::set-output name=matrix_osx::{"include":[{"name":"taichi-nightly","python":"3.8"},{"name":"taichi-nightly","python":"3.10"}]}"'
fi
build_and_upload_linux:
Expand Down Expand Up @@ -440,3 +436,64 @@ jobs:
run: |
python -m pip install twine
venv\Scripts\python misc/upload_release.py
create_release:
name: Create tag and publish release
needs:
[
build_and_upload_linux,
build_and_upload_mac,
build_and_upload_m1,
build_and_upload_macos_1014,
build_and_upload_windows,
]
runs-on: ubuntu-latest
if: github.event_name == 'workflow_dispatch'
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9

- name: Generate Changelog
id: changelog
run: |
pip3 install gitpython
content=$(python3 misc/make_changelog.py)
echo $content
# Escape multiline strings:
# https://renehernandez.io/snippets/multiline-strings-as-a-job-output-in-github-actions/
content="${content//'%'/'%25'}"
content="${content//$'\n'/'%0A'}"
content="${content//$'\r'/'%0D'}"
echo "::set-output name=content::$content"
- name: Create tag
run: |
git config user.email "[email protected]"
git config user.name "Taichi Gardener"
git tag -a ${RELEASE_VERSION} -m "Release ${RELEASE_VERSION}"
git push origin --tags
- name: Publish release
uses: softprops/action-gh-release@v1
with:
body: ${{ steps.changelog.outputs.content }}
tag_name: ${{ github.event.inputs.version }}

- name: Bump version
run: |
version_parts=(${RELEASE_VERSION//./ })
version_parts[2]=$(expr ${version_parts[2]} + 1)
next_version=$(IFS=.; echo "${version_parts[*]}")
# Update version.txt
git checkout -b "bump/$next_version"
echo "$next_version" > version.txt
git add version.txt
# Commit and push changes
git commit -m "Bump version to $next_version"
git push origin "bump/$next_version"
# Create pull request
gh pr create -B master -t "[misc] Bump version to $next_version"
env:
GITHUB_TOKEN: ${{ secrets.GARDENER_PAT }}
14 changes: 8 additions & 6 deletions .github/workflows/testing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -374,6 +374,12 @@ jobs:
runs-on: [self-hosted, windows, gpu]
timeout-minutes: 90
steps:
# See also https://github.com/taichi-dev/taichi/issues/4161
- name: Cleanup
shell: powershell
run: |
remove-item '${{ github.workspace }}\*' -recurse -force
- uses: actions/checkout@v2
with:
submodules: "recursive"
Expand All @@ -397,18 +403,14 @@ jobs:
- name: Build
shell: powershell
if: ${{ needs.check_files.outputs.run_job != 'false' }}
run: |
if ( "${{needs.check_files.outputs.run_job}}" -eq "false" ) {
exit 0
}
.\.github\workflows\scripts\win_build.ps1 -installVulkan -install -libsDir C:\
- name: Test
shell: powershell
if: ${{ needs.check_files.outputs.run_job != 'false' }}
run: |
if ( "${{needs.check_files.outputs.run_job}}" -eq "false" ) {
exit 0
}
.\.github\workflows\scripts\win_test.ps1
env:
TI_WANTED_ARCHS: cpu,cuda,opengl
Expand Down
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
include MANIFEST.in
include version.txt
include python/*.txt
include python/*.py
include *.cfg
Expand Down
41 changes: 41 additions & 0 deletions benchmarks/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
Install a few extra requirements:
```bash
python3 -m pip install -r requirements.txt
```

## Run

To run all benchmarks:
```bash
python3 run.py
```

## Result

The benchmark results will be stored in the `results` folder in your current directory.
If you wish to save the results as a single json file (`./results/results.json`):
```bash
python3 deserialize.py
```
Or you can specify the input and output path:
```bash
python3 deserialize.py --folder PATH_OF_RESULTS_FOLDER --output_path PATH_YOU_WISH_TO_STORE
```

## Tools

After getting benchmark results (`./results`), you can use a visualization tool to profile performance problems:
```bash
python3 visualization.py
```

You can specify the results file path:
```bash
python3 visualization.py --folder PATH_OF_RESULTS_FOLDER
```

The default host and port is `localhost:5006/visualization`.
If you want to enable remote access, take the following steps:
```bash
python3 visualization.py --host YOUR_IP_ADDRESS --port PORT_YOU_WISH_TO_USE
```
99 changes: 99 additions & 0 deletions benchmarks/deserialize.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
import argparse
import json
import os
from copy import deepcopy

from utils import dump2json


class ResultsBuilder():
    """Aggregate on-disk benchmark results into one nested dict.

    Expected layout under ``results_file_path``:
        _info.json                  -> {"suites": {suite: {"archs": [...]}}}
        <suite>/<arch>/_info.json   -> per-case metadata (keyed by case name)
        <suite>/<arch>/<case>.json  -> per-case results (keyed by result name)

    The aggregated shape is:
        {suite: {arch: {case: {"items": {...}, "results": {...}}}}}
    """
    def __init__(self, results_file_path: str):
        self._suites_result = {}
        self._file_path = results_file_path
        self.load_suites_result()

    def load_suites_result(self):
        """Populate ``self._suites_result`` from the result folder tree."""
        # benchmark info: the top-level _info.json lists suites and archs
        info_path = os.path.join(self._file_path, '_info.json')
        with open(info_path, 'r') as f:
            info_dict = json.load(f)['suites']
        # suite info: each (suite, arch) dir has its own _info.json of cases
        for suite_name, attrs in info_dict.items():
            self._suites_result[suite_name] = {}
            for arch in attrs['archs']:
                self._suites_result[suite_name][arch] = {}
                suite_info_path = os.path.join(self._file_path, suite_name,
                                               arch, "_info.json")
                with open(suite_info_path, 'r') as f:
                    suite_info_dict = json.load(f)
                # case info: drop the redundant 'name' field and rename
                # 'get_metric' to 'metrics' in the aggregated output
                for case_name, items in suite_info_dict.items():
                    items.pop('name')
                    items['metrics'] = items.pop('get_metric')
                    self._suites_result[suite_name][arch][case_name] = {
                        'items': items
                    }
        # cases result: read each case's result file, strip the leading tag
        # (the case name itself) and drop entries whose result is None
        for suite_name in self._suites_result:
            for arch in self._suites_result[suite_name]:
                for case_name in self._suites_result[suite_name][arch]:
                    case_info_path = os.path.join(self._file_path, suite_name,
                                                  arch, case_name + ".json")
                    with open(case_info_path, 'r') as f:
                        case_results = json.load(f)
                    remove_none_list = []
                    for name, data in case_results.items():
                        # first tag duplicates the case name; remove it
                        data['tags'] = data['tags'][1:]
                        if data['result'] is None:
                            remove_none_list.append(name)
                    # collect first, then pop: can't mutate while iterating
                    for name in remove_none_list:
                        case_results.pop(name)
                    self._suites_result[suite_name][arch][case_name][
                        'results'] = case_results

    def get_suites_result(self):
        """Return the aggregated nested results dict."""
        return self._suites_result

    def save_results_as_json(self, costomized_dir=None):
        """Write results.json to the result folder, or to ``costomized_dir``
        if given.  (Parameter name kept as-is — sic — for caller compat.)"""
        file_path = os.path.join(self._file_path, 'results.json')
        if costomized_dir is not None:
            file_path = os.path.join(costomized_dir, 'results.json')
        with open(file_path, 'w') as f:
            print(dump2json(self._suites_result), file=f)

    def print_info(self):
        """Print suite/case metadata, omitting the bulky 'results' payloads."""
        # deep-copy so popping 'results' does not mutate the real data
        info_dict = deepcopy(self._suites_result)
        for suite_name in info_dict:
            for arch in info_dict[suite_name]:
                for case in info_dict[suite_name][arch]:
                    info_dict[suite_name][arch][case].pop('results')
        print(dump2json(info_dict))


if __name__ == '__main__':
    # CLI: read a benchmark result folder and emit a single results.json.
    parser = argparse.ArgumentParser()

    parser.add_argument('-f',
                        '--folder',
                        default='./results',
                        dest='folder',
                        type=str,
                        help='Path of result folder. Defaults to ./results')

    parser.add_argument('-o',
                        '--output_path',
                        default='./results',
                        dest='output_path',
                        type=str,
                        # fixed: help text was a copy-paste of --folder's
                        help='Path of output folder. Defaults to ./results')

    args = parser.parse_args()
    result_folder = args.folder
    output_path = args.output_path

    # Build the aggregated results, save them, and print a summary.
    results = ResultsBuilder(result_folder)
    results.save_results_as_json(output_path)
    results.print_info()
4 changes: 3 additions & 1 deletion benchmarks/microbenchmarks/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
from .atomic_ops import AtomicOpsPlan
from .fill import FillPlan
from .math_opts import MathOpsPlan
from .matrix_ops import MatrixOpsPlan
from .memcpy import MemcpyPlan
from .saxpy import SaxpyPlan
from .stencil2d import Stencil2DPlan

benchmark_plan_list = [
AtomicOpsPlan, FillPlan, MathOpsPlan, MemcpyPlan, SaxpyPlan, Stencil2DPlan
AtomicOpsPlan, FillPlan, MathOpsPlan, MatrixOpsPlan, MemcpyPlan, SaxpyPlan,
Stencil2DPlan
]
Loading

0 comments on commit 0c17174

Please sign in to comment.