diff --git a/.github/workflows/Test.yml b/.github/workflows/Test.yml index 3c04f0f5..6bc0b4aa 100644 --- a/.github/workflows/Test.yml +++ b/.github/workflows/Test.yml @@ -26,10 +26,11 @@ jobs: - uses: julia-actions/cache@v2 - name: Develop subpackages run: | - julia --project -e " + julia --project -e ' using Pkg + Pkg.develop("KernelAbstractions") Pkg.develop([PackageSpec(; name=basename(path), path) for path in ARGS]) - " lib/GPUArraysCore lib/JLArrays + ' lib/GPUArraysCore - uses: julia-actions/julia-runtest@v1 continue-on-error: ${{ matrix.version == 'nightly' }} - uses: julia-actions/julia-processcoverage@v1 diff --git a/Project.toml b/Project.toml index 95aa3f23..83dce68b 100644 --- a/Project.toml +++ b/Project.toml @@ -18,7 +18,7 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" [compat] Adapt = "4.0" GPUArraysCore = "= 0.2.0" -KernelAbstractions = "0.9.28" +KernelAbstractions = "0.10" LLVM = "3.9, 4, 5, 6, 7, 8, 9" LinearAlgebra = "1" Printf = "1" diff --git a/docs/src/interface.md b/docs/src/interface.md index 9e4864ad..fbecfb63 100644 --- a/docs/src/interface.md +++ b/docs/src/interface.md @@ -20,7 +20,7 @@ end ``` -This will allow your defined type (in this case `JLArray`) to use the GPUArrays interface where available. +This will allow your defined type (in this case `CustomArray`) to use the GPUArrays interface where available. To be able to actually use the functionality that is defined for `AbstractGPUArray`s, you need to define the backend, like so: ```julia @@ -29,7 +29,7 @@ struct CustomBackend <: KernelAbstractions.GPU KernelAbstractions.get_backend(a::CA) where CA <: CustomArray = CustomBackend() ``` -There are numerous examples of potential interfaces for GPUArrays, such as with [JLArrays](https://github.com/JuliaGPU/GPUArrays.jl/blob/master/lib/JLArrays/src/JLArrays.jl), [CuArrays](https://github.com/JuliaGPU/CUDA.jl/blob/master/src/gpuarrays.jl), and [ROCArrays](https://github.com/JuliaGPU/AMDGPU.jl/blob/master/src/gpuarrays.jl). 
+There are numerous examples of potential interfaces for GPUArrays, such as with [CuArrays](https://github.com/JuliaGPU/CUDA.jl/blob/master/src/CUDAKernels.jl), [ROCArrays](https://github.com/JuliaGPU/AMDGPU.jl/blob/master/src/ROCKernels.jl), and [MtlArrays](https://github.com/JuliaGPU/Metal.jl/blob/master/src/MetalKernels.jl). ## Caching Allocator diff --git a/docs/src/testsuite.md b/docs/src/testsuite.md index c953eff0..281a9f3d 100644 --- a/docs/src/testsuite.md +++ b/docs/src/testsuite.md @@ -21,7 +21,7 @@ If you don't want to run the whole suite, you can also run parts of it: ```julia -T = JLArray +T = Array # As of KernelAbstractions v0.10, Array uses POCLBackend to run KA kernels GPUArrays.allowscalar(false) # fail tests when slow indexing path into Array type is used. TestSuite.test_gpuinterface(T) # interface functions like gpu_call, threadidx, etc diff --git a/test/Project.toml b/test/Project.toml index 4e233cab..ebfa0a4d 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -2,7 +2,6 @@ Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" -JLArrays = "27aeb0d3-9eb9-45fb-866b-73c2ecf80fcb" KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" diff --git a/test/runtests.jl b/test/runtests.jl index 66d6a096..e16f7bb8 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,3 +1,6 @@ +using Pkg +Pkg.develop("KernelAbstractions") + using Distributed using Dates import REPL @@ -47,7 +50,7 @@ include("setup.jl") # make sure everything is precompiled # choose tests const tests = [] const test_runners = Dict() -for AT in (JLArray, Array), name in keys(TestSuite.tests) +for AT in (Array,), name in keys(TestSuite.tests) push!(tests, "$(AT)/$name") test_runners["$(AT)/$name"] = ()->TestSuite.tests[name](AT) end diff --git a/test/setup.jl b/test/setup.jl index 
1e06e2f0..62e444a5 100644 --- a/test/setup.jl +++ b/test/setup.jl @@ -1,4 +1,4 @@ -using Distributed, Test, JLArrays +using Distributed, Test include("testsuite.jl") @@ -15,7 +15,7 @@ function runtests(f, name) # generate a temporary module to execute the tests in mod_name = Symbol("Test", rand(1:100), "Main_", replace(name, '/' => '_')) mod = @eval(Main, module $mod_name end) - @eval(mod, using Test, Random, JLArrays) + @eval(mod, using Test, Random) let id = myid() wait(@spawnat 1 print_testworker_started(name, id)) @@ -24,7 +24,6 @@ function runtests(f, name) ex = quote GC.gc(true) Random.seed!(1) - JLArrays.allowscalar(false) @timed @testset $"$name" begin $f()