diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 00b8923ee..daa3bdde8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -59,7 +59,7 @@ jobs: - name: "Software Install - Python" run: python -m pip install \ setuptools \ - numpy \ + "numpy<2" \ matplotlib \ contextlib2 \ simplejson \ @@ -119,7 +119,7 @@ jobs: UNITTEST_OS: ${{ matrix.os }} UNITTEST_PY: ${{ matrix.python-version }} if: ${{ matrix.os == 'self-hosted' && matrix.python-version == '3.8' }} - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v4 with: files: ./test/coverage.xml, ./testbench/coverage.xml env_vars: UNITTEST_OS,UNITTEST_PY diff --git a/CHANGELOG b/CHANGELOG index beb655822..32072a898 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -8,6 +8,7 @@ * Added set_stream and get_stream to bifrost.device to help control which CUDA stream is used * Added bifrost.device.ExternalStream as a context manager to help with mixing Bifrost and cupy/pycuda * Fixed a problem calling bifrost.reduce on a slice of an array + * Added the astype() method to `bifrost.ndarray` 0.10.0 * Switched over to an autotools-based build system diff --git a/Makefile.in b/Makefile.in index 29c5728b4..cfb60f12e 100644 --- a/Makefile.in +++ b/Makefile.in @@ -7,8 +7,12 @@ DAT_DIR = share SRC_DIR = src HAVE_PYTHON = @HAVE_PYTHON@ + HAVE_DOCKER = @HAVE_DOCKER@ +CAN_BUILD_CXX_DOCS = @HAVE_CXX_DOCS@ +CAN_BUILD_PYTHON_DOCS = @HAVE_PYTHON_DOCS@ + BIFROST_PYTHON_DIR = python all: libbifrost python @@ -63,8 +67,13 @@ ifeq ($(HAVE_PYTHON),1) endif .PHONY: uninstall -doc: $(INC_DIR)/bifrost/*.h Doxyfile +doc: $(INC_DIR)/bifrost/*.h Doxyfile docs/source/*.rst docs/source/*.py +ifeq ($(CAN_BUILD_CXX_DOCS),1) @DX_DOXYGEN@ Doxyfile +endif +ifeq ($(CAN_BUILD_PYTHON_DOCS),1) + $(MAKE) -C docs singlehtml +endif .PHONY: doc python: libbifrost diff --git a/README.md b/README.md index 6bee4c149..955757048 100644 --- a/README.md +++ b/README.md @@ -97,8 +97,12 @@ go to the following 
[link](https://colab.research.google.com/github/ledatelescop ### C Dependencies +If using Ubuntu or another Debian-based linux distribution: + $ sudo apt-get install exuberant-ctags +Otherwise check https://ctags.sourceforge.net/ for install instructions. + ### Python Dependencies * numpy @@ -171,8 +175,17 @@ your machine. ### Building the Docs from Scratch -Install sphinx and breathe using pip, and also install Doxygen. +Install breathe using pip: + + $ sudo pip install breathe sphinx + +Also install Doxygen using your package manager. +In Ubuntu, for example: + + $ sudo apt-get install doxygen +Otherwise check https://www.doxygen.nl/ for Doxygen install instructions. + Doxygen documentation can be generated by running: $ make doc diff --git a/configure b/configure index 2fc51bfdd..75ef17de0 100755 --- a/configure +++ b/configure @@ -664,6 +664,11 @@ MAP_KERNEL_STDCXX PACKAGE_VERSION_MICRO PACKAGE_VERSION_MINOR PACKAGE_VERSION_MAJOR +HAVE_PYTHON_DOCS +HAVE_CXX_DOCS +PYTHON_BREATHE +PYTHON_SPHINXA +PYTHON_SPHINXB DX_RULES PAPER_SIZE DOXYGEN_PAPER_SIZE @@ -894,6 +899,9 @@ enable_doxygen_chi enable_doxygen_html enable_doxygen_ps enable_doxygen_pdf +with_sphinx_build +with_sphinx_apidoc +with_breathe_apidoc ' ac_precious_vars='build_alias host_alias @@ -911,7 +919,10 @@ CXXCPP CTAGS PYTHON DOCKER -DOXYGEN_PAPER_SIZE' +DOXYGEN_PAPER_SIZE +PYTHON_SPHINXB +PYTHON_SPHINXA +PYTHON_BREATHE' # Initialize some variables set by options. 
@@ -1555,7 +1566,7 @@ Optional Features: --disable-python disable building the Python bindings (default=no) --disable-doxygen-doc don't generate any doxygen documentation --enable-doxygen-dot generate graphics for doxygen documentation - --disable-doxygen-man don't generate doxygen manual pages + --enable-doxygen-man generate doxygen manual pages --enable-doxygen-rtf generate doxygen RTF documentation --enable-doxygen-xml generate doxygen XML documentation --enable-doxygen-chm generate doxygen compressed HTML help documentation @@ -1598,6 +1609,12 @@ Optional Packages: --with-pybuild-flags build flags for python (default='') --with-pyinstall-flags install flags for python (default='') --with-docker=[PATH] absolute path to docker executable + --with-sphinx-build=[PATH] + absolute path to sphinx-build executable + --with-sphinx-apidoc=[PATH] + absolute path to sphinx-apidoc executable + --with-breathe-apidoc=[PATH] + absolute path to breathe-apidoc executable Some influential environment variables: CC C compiler command @@ -1617,6 +1634,12 @@ Some influential environment variables: DOCKER Absolute path to docker executable DOXYGEN_PAPER_SIZE a4wide (default), a4, letter, legal or executive + PYTHON_SPHINXB + Absolute path to sphinx-build executable + PYTHON_SPHINXA + Absolute path to sphinx-apidoc executable + PYTHON_BREATHE + Absolute path to breathe-apidoc executable Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. 
@@ -22951,15 +22974,6 @@ fi - - - - - - - - - # Files: DX_PROJECT=bifrost @@ -23458,7 +23472,7 @@ esac else $as_nop -DX_FLAG_man=1 +DX_FLAG_man=0 test "$DX_FLAG_doc" = "1" || DX_FLAG_man=0 @@ -25048,7 +25062,491 @@ DX_RULES="${DX_SNIPPET_doc}" #echo DX_ENV=$DX_ENV -# + + + + + + + + + +if test x${HAVE_PYTHON} == x1 +then : + + + + + + + + + + + if test -z "$PYTHON_SPHINXB" +then : + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether sphinx-build executable path has been provided" >&5 +printf %s "checking whether sphinx-build executable path has been provided... " >&6; } + +# Check whether --with-sphinx-build was given. +if test ${with_sphinx_build+y} +then : + withval=$with_sphinx_build; + if test "$withval" != yes && test "$withval" != no +then : + + PYTHON_SPHINXB="$withval" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SPHINXB" >&5 +printf "%s\n" "$PYTHON_SPHINXB" >&6; } + +else $as_nop + + PYTHON_SPHINXB="" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + if test "$withval" != no +then : + + # Extract the first word of "sphinx-build", so it can be a program name with args. +set dummy sphinx-build; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_path_PYTHON_SPHINXB+y} +then : + printf %s "(cached) " >&6 +else $as_nop + case $PYTHON_SPHINXB in + [\\/]* | ?:[\\/]*) + ac_cv_path_PYTHON_SPHINXB="$PYTHON_SPHINXB" # Let the user override the test with a path. 
+ ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PYTHON_SPHINXB="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PYTHON_SPHINXB=$ac_cv_path_PYTHON_SPHINXB +if test -n "$PYTHON_SPHINXB"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SPHINXB" >&5 +printf "%s\n" "$PYTHON_SPHINXB" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + +fi + +fi + +else $as_nop + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + # Extract the first word of "sphinx-build", so it can be a program name with args. +set dummy sphinx-build; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_path_PYTHON_SPHINXB+y} +then : + printf %s "(cached) " >&6 +else $as_nop + case $PYTHON_SPHINXB in + [\\/]* | ?:[\\/]*) + ac_cv_path_PYTHON_SPHINXB="$PYTHON_SPHINXB" # Let the user override the test with a path. 
+ ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PYTHON_SPHINXB="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PYTHON_SPHINXB=$ac_cv_path_PYTHON_SPHINXB +if test -n "$PYTHON_SPHINXB"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SPHINXB" >&5 +printf "%s\n" "$PYTHON_SPHINXB" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + +fi + + +fi + + + + + + + + + + + + + + + + + if test -z "$PYTHON_SPHINXA" +then : + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether sphinx-apidoc executable path has been provided" >&5 +printf %s "checking whether sphinx-apidoc executable path has been provided... " >&6; } + +# Check whether --with-sphinx-apidoc was given. +if test ${with_sphinx_apidoc+y} +then : + withval=$with_sphinx_apidoc; + if test "$withval" != yes && test "$withval" != no +then : + + PYTHON_SPHINXA="$withval" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SPHINXA" >&5 +printf "%s\n" "$PYTHON_SPHINXA" >&6; } + +else $as_nop + + PYTHON_SPHINXA="" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + if test "$withval" != no +then : + + # Extract the first word of "sphinx-apidoc", so it can be a program name with args. +set dummy sphinx-apidoc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... 
" >&6; } +if test ${ac_cv_path_PYTHON_SPHINXA+y} +then : + printf %s "(cached) " >&6 +else $as_nop + case $PYTHON_SPHINXA in + [\\/]* | ?:[\\/]*) + ac_cv_path_PYTHON_SPHINXA="$PYTHON_SPHINXA" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PYTHON_SPHINXA="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PYTHON_SPHINXA=$ac_cv_path_PYTHON_SPHINXA +if test -n "$PYTHON_SPHINXA"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SPHINXA" >&5 +printf "%s\n" "$PYTHON_SPHINXA" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + +fi + +fi + +else $as_nop + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + # Extract the first word of "sphinx-apidoc", so it can be a program name with args. +set dummy sphinx-apidoc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_path_PYTHON_SPHINXA+y} +then : + printf %s "(cached) " >&6 +else $as_nop + case $PYTHON_SPHINXA in + [\\/]* | ?:[\\/]*) + ac_cv_path_PYTHON_SPHINXA="$PYTHON_SPHINXA" # Let the user override the test with a path. 
+ ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PYTHON_SPHINXA="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PYTHON_SPHINXA=$ac_cv_path_PYTHON_SPHINXA +if test -n "$PYTHON_SPHINXA"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SPHINXA" >&5 +printf "%s\n" "$PYTHON_SPHINXA" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + +fi + + +fi + + + + + + + + + + + + + + + + + if test -z "$PYTHON_BREATHE" +then : + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether breathe-apidoc executable path has been provided" >&5 +printf %s "checking whether breathe-apidoc executable path has been provided... " >&6; } + +# Check whether --with-breathe-apidoc was given. +if test ${with_breathe_apidoc+y} +then : + withval=$with_breathe_apidoc; + if test "$withval" != yes && test "$withval" != no +then : + + PYTHON_BREATHE="$withval" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_BREATHE" >&5 +printf "%s\n" "$PYTHON_BREATHE" >&6; } + +else $as_nop + + PYTHON_BREATHE="" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + if test "$withval" != no +then : + + # Extract the first word of "breathe-apidoc", so it can be a program name with args. +set dummy breathe-apidoc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... 
" >&6; } +if test ${ac_cv_path_PYTHON_BREATHE+y} +then : + printf %s "(cached) " >&6 +else $as_nop + case $PYTHON_BREATHE in + [\\/]* | ?:[\\/]*) + ac_cv_path_PYTHON_BREATHE="$PYTHON_BREATHE" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PYTHON_BREATHE="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PYTHON_BREATHE=$ac_cv_path_PYTHON_BREATHE +if test -n "$PYTHON_BREATHE"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_BREATHE" >&5 +printf "%s\n" "$PYTHON_BREATHE" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + +fi + +fi + +else $as_nop + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + # Extract the first word of "breathe-apidoc", so it can be a program name with args. +set dummy breathe-apidoc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_path_PYTHON_BREATHE+y} +then : + printf %s "(cached) " >&6 +else $as_nop + case $PYTHON_BREATHE in + [\\/]* | ?:[\\/]*) + ac_cv_path_PYTHON_BREATHE="$PYTHON_BREATHE" # Let the user override the test with a path. 
+ ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PYTHON_BREATHE="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PYTHON_BREATHE=$ac_cv_path_PYTHON_BREATHE +if test -n "$PYTHON_BREATHE"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON_BREATHE" >&5 +printf "%s\n" "$PYTHON_BREATHE" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + +fi + + +fi + + + + + + +fi + +HAVE_CXX_DOCS=0 + +HAVE_PYTHON_DOCS=0 + +if test x${DX_DOXYGEN} == x +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: missing doxygen, documentation cannot be built" >&5 +printf "%s\n" "$as_me: WARNING: missing doxygen, documentation cannot be built" >&2;} +else $as_nop + HAVE_CXX_DOCS=1 + + if test x${PYTHON} != xno +then : + if test x${PYTHON_SPHINXB} = x +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: missing the sphinx-build, python documentation cannot not be built" >&5 +printf "%s\n" "$as_me: WARNING: missing the sphinx-build, python documentation cannot not be built" >&2;} +else $as_nop + if test x${PYTHON_SPHINXA} = x +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: missing the sphinx-apidoc, python documentation cannot not be built" >&5 +printf "%s\n" "$as_me: WARNING: missing the sphinx-apidoc, python documentation cannot not be built" >&2;} +else $as_nop + if test x${PYTHON_BREATHE} = x +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: missing the breathe-apidoc, python documentation cannot not be built" >&5 +printf "%s\n" "$as_me: WARNING: missing the breathe-apidoc, python 
documentation cannot not be built" >&2;} +else $as_nop + HAVE_PYTHON_DOCS=1 + +fi +fi +fi +fi +fi + + # Version splitting # @@ -25180,7 +25678,7 @@ then : fi -ac_config_files="$ac_config_files config.mk Makefile src/Makefile python/Makefile share/bifrost.pc src/bifrost/config.h" +ac_config_files="$ac_config_files config.mk Makefile src/Makefile python/Makefile docs/Makefile share/bifrost.pc src/bifrost/config.h" cat >confcache <<\_ACEOF @@ -26280,6 +26778,7 @@ do "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; "python/Makefile") CONFIG_FILES="$CONFIG_FILES python/Makefile" ;; + "docs/Makefile") CONFIG_FILES="$CONFIG_FILES docs/Makefile" ;; "share/bifrost.pc") CONFIG_FILES="$CONFIG_FILES share/bifrost.pc" ;; "src/bifrost/config.h") CONFIG_FILES="$CONFIG_FILES src/bifrost/config.h" ;; diff --git a/configure.ac b/configure.ac index 2033cc92f..618faebbd 100644 --- a/configure.ac +++ b/configure.ac @@ -340,6 +340,7 @@ AS_IF([test x${DOCKER} != xno], # Documentation # +DX_INIT_DOXYGEN([bifrost]) DX_DOT_FEATURE(OFF) DX_HTML_FEATURE(ON) DX_CHM_FEATURE(OFF) @@ -349,9 +350,27 @@ DX_RTF_FEATURE(OFF) DX_XML_FEATURE(OFF) DX_PDF_FEATURE(ON) DX_PS_FEATURE(ON) -DX_INIT_DOXYGEN([bifrost]) -# +AS_IF([test x${HAVE_PYTHON} == x1], + [AX_WITH_PROG(PYTHON_SPHINXB, sphinx-build) + AX_WITH_PROG(PYTHON_SPHINXA, sphinx-apidoc) + AX_WITH_PROG(PYTHON_BREATHE, breathe-apidoc)]) + +AC_SUBST(HAVE_CXX_DOCS, 0) +AC_SUBST(HAVE_PYTHON_DOCS, 0) +AS_IF([test x${DX_DOXYGEN} == x], + [AC_MSG_WARN([missing doxygen, documentation cannot be built])], + [AC_SUBST(HAVE_CXX_DOCS, 1) + AS_IF([test x${PYTHON} != xno], + [AS_IF([test x${PYTHON_SPHINXB} = x], + [AC_MSG_WARN([missing the sphinx-build, python documentation cannot not be built])], + [AS_IF([test x${PYTHON_SPHINXA} = x], + [AC_MSG_WARN([missing the sphinx-apidoc, python documentation cannot not be built])], + [AS_IF([test x${PYTHON_BREATHE} = x], + [AC_MSG_WARN([missing the 
breathe-apidoc, python documentation cannot not be built])], + [AC_SUBST(HAVE_PYTHON_DOCS, 1)])])])])]) + + # Version splitting # @@ -428,7 +447,7 @@ AS_IF([test x$HAVE_CUDA != x0], CPPFLAGS="$CPPFLAGS -I$CUDA_HOME/include" ]) -AC_CONFIG_FILES([config.mk Makefile src/Makefile python/Makefile share/bifrost.pc src/bifrost/config.h]) +AC_CONFIG_FILES([config.mk Makefile src/Makefile python/Makefile docs/Makefile share/bifrost.pc src/bifrost/config.h]) AC_OUTPUT diff --git a/docs/Makefile b/docs/Makefile.in similarity index 66% rename from docs/Makefile rename to docs/Makefile.in index f7d093d3f..8d4d63995 100644 --- a/docs/Makefile +++ b/docs/Makefile.in @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -SPHINXBUILD = sphinx-build +SPHINXBUILD = @PYTHON_SPHINXB@ SPHINXPROJ = bifrost SOURCEDIR = source BUILDDIR = build @@ -20,15 +20,15 @@ help: @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) generate_python_reference: - sphinx-apidoc -o source -d 5 --force ../python/bifrost/ + @PYTHON_SPHINXA@ -o source -d 5 --force ../python/bifrost/ rm source/modules.rst - sed -i '1s/.*/Python Reference/' source/bifrost.rst - sed -i '2s/.*/================/' source/bifrost.rst - sed -i '1s/.*/Block Library Reference/' source/bifrost.blocks.rst - sed -i '2s/.*/=======================/' source/bifrost.blocks.rst + @SED@ -i '1s/.*/Python Reference/' source/bifrost.rst + @SED@ -i '2s/.*/================/' source/bifrost.rst + @SED@ -i '1s/.*/Block Library Reference/' source/bifrost.blocks.rst + @SED@ -i '2s/.*/=======================/' source/bifrost.blocks.rst .PHONY: generate_python_reference generate_cpp_reference: - breathe-apidoc -o source -p bifrost --force ./doxygen/xml/ + @PYTHON_BREATHE@ -o source -p bifrost --force ./doxygen/xml/ rm -rf source/file .PHONY: generate_cpp_reference diff --git a/docs/README.rst b/docs/README.rst index f757e304f..f56d4f41c 100644 --- a/docs/README.rst +++ b/docs/README.rst @@ -10,3 +10,9 @@ 
Inside the `docs` folder, execute `./docker_build_docs.sh`, which will create a container called `bifrost_docs`, then run it, and have it complete the docs-building process for you, outputting the entire html documentation inside `docs/html`. + +If you are not using Docker, ensure that you have "sphinx", "breathe", +and "doxygen" installed. In the parent directory run "doxygen Doxyfile." +Return to the docs directory, where you can run, for example, +"make singlehtml" where "singlehtml" can be replaced +by the format you wish the docs to be in. diff --git a/docs/source/Cpp-Development.rst b/docs/source/Cpp-Development.rst index 21f6a59bd..061c87ed5 100644 --- a/docs/source/Cpp-Development.rst +++ b/docs/source/Cpp-Development.rst @@ -81,3 +81,78 @@ Create a Ring Buffer and Load Data bfRingSpanRelease(my_read_span); bfRingSequenceClose(my_read_sequence); bfRingDestroy(my_ring); //delete the ring from memory + +Adding New Packet Formats +------------------------- + +A wide variety of packet formats are already included in Bifrost. +For simplicity, it is likely preferable to make use of these pre-existing +formats. In the case that this becomes infeasible, here is some of what +is necessary in order to add a new format to Bifrost. + +Files to edit: + +1. ``python/bifrost/packet_capture.py`` + + * Add ``set_mypacket`` to the ``PacketCaptureCallback`` class. It will likely look + very similar to the ``set_chips`` method. + +2. ``src/bifrost/packet_capture.h`` + + * This is for ctypesgen. Add a typedef for the sequence callback. This typedef + corresponds to the sequence callback used in the packet reader, see the sections + on ``test_udp_io.py`` and ``test_disk_io.py`` for examples of writing the packet reader. + * Also declare the capture callback. + +3. ``src/formats/format.hpp`` + + * Add a one-line ``#include "mypacket.hpp"`` + +4. ``src/formats/mypacket.hpp`` + + * This is the only file that will need to be fully written from scratch. 
The + easiest way to proceed is to copy the most similar existing packet format and + modify it accordingly. One will need to make sure that the header is defined + properly and that the correct information is going into it, and one will need + to make sure that the `memcpy` operation is properly filling the packet with + data. + +5. ``src/packet_capture.cpp`` + + * Need to add a call to the packet capture callback. + +6. ``src/packet_capture.hpp`` + + * This is where you will spend most of your time. Add your packet capture sequence + callback to the ``BFpacketcapture_callback_impl`` initialization list. Immediately + after the initialization list, add the ``set_mypacket`` and ``get_mypacket`` methods. + * Add a new class: ``BFpacketcapture_mypacket_impl``. In the case of simpler packet + formats, this may be very close to the already written ``BFpacketcapture_chips_impl``. + It's probably best to start by copying the format that is closest to the format + you are writing and modify it. + * In ``BFpacketcapture_create``, add the format to the first long if-else if statement. + This section tells the disk writer the size of the packet to expect. Then add your + packet to the second if-else if statement. + +7. ``src/packet_writer.hpp`` + + * After the ``BFpacketwriter_generic_impl``, add a class ``BFpacketwriter_mypacket_impl``. + Take care to choose the correct BF\_DTYPE\_???. + * In ``BFpacketwriter_create``, add your packet to the first if-else if statement. + Note that nsamples needs to correspond to the number elements in the data portion + of the packet. Then add your packet to the third if-else if statement along all + the other formats. + +8. ``test/test_disk_io.py`` + + * Add a reader for your packet format. This reader will be what is used in the actual + code as well. It contains the sequence callback that we declared in the ``src/bifrost/packet_capture.h`` + file. Note that the header in this sequence callback is the ring header not the + packet header. 
+ * You will also need to add a ``_get_mypacket_data``, ``test_write_mypacket``, + and ``test_read_mypacket``. + +9. ``test/test_udp_io.py`` + + * The UDP version of ``test_disk_io``. Once you have written the disk I/O test, this + test is fairly simple to implement, provided you wrote it correctly. diff --git a/python/bifrost/ndarray.py b/python/bifrost/ndarray.py index ba9a88385..1b43d5ac3 100644 --- a/python/bifrost/ndarray.py +++ b/python/bifrost/ndarray.py @@ -1,5 +1,5 @@ -# Copyright (c) 2016-2023, The Bifrost Authors. All rights reserved. +# Copyright (c) 2016-2024, The Bifrost Authors. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions @@ -40,7 +40,7 @@ import ctypes import numpy as np from bifrost.memory import raw_malloc, raw_free, raw_get_space, space_accessible -from bifrost.libbifrost import _bf, _th, _check, _space2string +from bifrost.libbifrost import _bf, _th, _check, _array, _space2string from bifrost import device from bifrost.DataType import DataType from bifrost.Space import Space @@ -352,13 +352,47 @@ def view(self, dtype=None, type_=None): v.bf.dtype = dtype_bf v._update_BFarray() return v - #def astype(self, dtype): - # dtype_bf = DataType(dtype) - # dtype_np = dtype_bf.as_numpy_dtype() - # # TODO: This segfaults for cuda space; need type conversion support in backend - # a = super(ndarray, self).astype(dtype_np) - # a.bf.dtype = dtype_bf - # return a + def astype(self, dtype): + dtype_bf = DataType(dtype) + if space_accessible(self.bf.space, ['system']): + ## For arrays that can be accessed from the system space, use + ## numpy.ndarray.copy() to do the heavy lifting + dtype_np = dtype_bf.as_numpy_dtype() + if self.bf.space == 'cuda_managed': + ## TODO: Decide where/when these need to be called + device.stream_synchronize() + if dtype_bf.is_complex and dtype_bf.is_integer: + ## Catch for the complex integer types + a = 
ndarray(shape=self.shape, dtype=dtype_bf) + a['re'] = self.real.astype(dtype_bf.as_real()) + a['im'] = self.imag.astype(dtype_bf.as_real()) + else: + a = super(ndarray, self).astype(dtype_np) + a.bf.dtype = dtype_bf + else: + ## For arrays that can be access from CUDA, use bifrost.map + ## to do the heavy lifting + ## TODO: Would it be better to use quantize/unpack instead of map? + a = ndarray(shape=self.shape, dtype=dtype_bf, space=self.bf.space) + if dtype_bf.is_complex: + if self.bf.dtype.is_complex: + ## complex in -> complex out + func_string = b'a.real = b.real; a.imag = b.imag' + else: + ## real in -> complex out + func_string = b'a.real = b; a.imag = 0' + else: + if self.bf.dtype.is_complex: + ## complex in -> real out (plus the standard "drop imag part" warning) + np.ComplexWarning() + func_string = b'a = b.real' + else: + ## real in -> real out + func_string = b'a = b' + _check(_bf.bfMap(0, _array(None, dtype=ctypes.c_long), _array(None), + 2, _array([a.as_BFarray(), self.as_BFarray()]), _array(['a', 'b']), + None, func_string, None, _array(None), _array(None))) + return a def _system_accessible_copy(self): if space_accessible(self.bf.space, ['system']): return self diff --git a/src/trace.hpp b/src/trace.hpp index 13a127d0a..35aad52be 100644 --- a/src/trace.hpp +++ b/src/trace.hpp @@ -39,6 +39,7 @@ #include #include #include +#include #if BF_TRACE_ENABLED // Note: __PRETTY_FUNCTION__ is GCC-specific diff --git a/src/utils.hpp b/src/utils.hpp index db9b0d03b..32219dcd0 100644 --- a/src/utils.hpp +++ b/src/utils.hpp @@ -39,6 +39,7 @@ #include #include #include // For ::memcpy +#include #include #define BF_DTYPE_IS_COMPLEX(dtype) bool((dtype) & BF_DTYPE_COMPLEX_BIT) diff --git a/test/test_ndarray.py b/test/test_ndarray.py index 2ef006c9f..267ac7a68 100644 --- a/test/test_ndarray.py +++ b/test/test_ndarray.py @@ -31,6 +31,7 @@ import ctypes from bifrost.libbifrost_generated import BF_CUDA_ENABLED +from bifrost.DataType import DataType class 
NDArrayTest(unittest.TestCase): def setUp(self): @@ -152,6 +153,47 @@ def test_setitem(self): np.testing.assert_equal(g.copy('system'), np.array([[99,88],[2,3],[4,5]])) g[:,1] = [77,66,55] np.testing.assert_equal(g.copy('system'), np.array([[99,77],[2,66],[4,55]])) + def run_type_conversion(self, space='system'): + # Real + for dtype_in in (np.int8, np.int16, np.int32, np.float32, np.float64): + a = np.array(self.known_vals, dtype=dtype_in) + c = bf.ndarray(a, dtype=dtype_in, space=space) + for dtype in ('i8', 'i16', 'i32', 'i64', 'f64', 'ci8', 'ci16', 'ci32', 'cf32', 'cf64'): + np_dtype = DataType(dtype).as_numpy_dtype() + try: + ## Catch for the complex integer types + len(np_dtype) + b = np.zeros(a.shape, dtype=np_dtype) + b['re'] = a + except (IndexError, TypeError): + b = a.astype(np_dtype) + d = c.astype(dtype) + d = d.copy(space='system') + np.testing.assert_equal(b, d) + # Complex + for dtype_in,dtype_in_cmplx in zip((np.float32,np.float64), ('cf32', 'cf64')): + a = np.array(self.known_vals, dtype=dtype_in) + a = np.stack([a,a[::-1]], axis=0) + a = a.view(np.complex64) + c = bf.ndarray(a, dtype=dtype_in_cmplx, space=space) + for dtype in ('ci8', 'ci16', 'ci32', 'cf32', 'cf64', 'i8', 'i16', 'i32', 'i64', 'f64'): + np_dtype = DataType(dtype).as_numpy_dtype() + try: + ## Catch for the complex integer types + len(np_dtype) + b = np.zeros(a.shape, dtype=np_dtype) + b['re'] = a.real + b['im'] = a.imag + except (IndexError, TypeError): + b = a.astype(np_dtype) + d = c.astype(dtype) + d = d.copy(space='system') + np.testing.assert_equal(b, d) + def test_type_conversion(self): + self.run_type_conversion() + @unittest.skipUnless(BF_CUDA_ENABLED, "requires GPU support") + def test_space_type_conversion(self): + self.run_type_conversion(space='cuda') def test_BFarray(self): """ Test ndarray.as_BFarray() roundtrip """ a = bf.ndarray(np.arange(100), dtype='i32') @@ -162,4 +204,4 @@ def test_BFarray(self): a = bf.ndarray(np.arange(100), dtype='cf32') aa = a.as_BFarray() b 
= bf.ndarray(aa) - np.testing.assert_equal(a, b) \ No newline at end of file + np.testing.assert_equal(a, b) diff --git a/tutorial/00_getting_started.ipynb b/tutorial/00_getting_started.ipynb index 67642df8f..32f0e7cab 100644 --- a/tutorial/00_getting_started.ipynb +++ b/tutorial/00_getting_started.ipynb @@ -38,7 +38,7 @@ " try:\n", " import google.colab\n", " !sudo apt-get -qq install exuberant-ctags libopenblas-dev software-properties-common build-essential\n", - " !pip install -q contextlib2 pint simplejson scipy git+https://github.com/ctypesgen/ctypesgen.git\n", + " !pip install -q contextlib2 pint simplejson scipy ctypesgen==1.0.2\n", " ![ -d ~/bifrost/.git ] || git clone https://github.com/ledatelescope/bifrost ~/bifrost\n", " !(cd ~/bifrost && ./configure && make -j all && sudo make install)\n", " import bifrost\n", @@ -553,4 +553,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +}