diff --git a/Project.toml b/Project.toml
index d02f6b4..83c4738 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "StridedViews"
 uuid = "4db3bf67-4bd7-4b4e-b153-31dc3fb37143"
 authors = ["Lukas Devos ", "Jutho Haegeman "]
-version = "0.3.2"
+version = "0.4.0"
 
 [deps]
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
diff --git a/README.md b/README.md
index 4ba68f6..e5b6a6a 100644
--- a/README.md
+++ b/README.md
@@ -50,3 +50,10 @@ subsequent dimensions `i` and `i+1` can only be joined if `stride(A,i+1) ==
 size(A,i)*stride(A,i)`. Instead of overloading `reshape`, Strided.jl provides a separate
 function `sreshape` which returns a `StridedView` over the same parent data, or throws a
 runtime error if this is impossible.
+
+### News
+
+Since StridedViews v0.4.0, the `StridedView` type attempts to generate fewer specializations
+by normalizing the parent array type. In particular, for `DenseArray` parents we attempt to
+reshape the parent array to a vector, and for `Memory`-based arrays (Julia v1.11+) we unpack
+the `Memory` object directly. This should improve compile times.
diff --git a/src/auxiliary.jl b/src/auxiliary.jl
index 78a5be4..873a356 100644
--- a/src/auxiliary.jl
+++ b/src/auxiliary.jl
@@ -45,6 +45,13 @@ function _normalizestrides(size::Dims{N}, strides::Dims{N}) where {N}
     return strides
 end
 
+# 'Normalize' the layout of a DenseArray, in order to reduce the number of required
+# specializations in functions.
+@static if isdefined(Core, :Memory)
+    @inline _normalizeparent(A::Array) = A.ref.mem
+end
+@inline _normalizeparent(A::DenseArray) = reshape(A, length(A))
+
 # Auxiliary methods for `sview`
 #------------------------------
 # Compute the new dimensions of a strided view given the original size and the view slicing
diff --git a/src/stridedview.jl b/src/stridedview.jl
index 03e2abc..2a5c3e5 100644
--- a/src/stridedview.jl
+++ b/src/stridedview.jl
@@ -34,13 +34,15 @@ end
 
 # Constructors
 #--------------
-function StridedView(parent::A,
+function StridedView(parent::DenseArray,
                      size::NTuple{N,Int}=size(parent),
                      strides::NTuple{N,Int}=strides(parent),
                      offset::Int=0,
-                     op::F=identity) where {A<:DenseArray,N,F}
+                     op::F=identity) where {N,F}
     T = Base.promote_op(op, eltype(parent))
-    return StridedView{T,N,A,F}(parent, size, _normalizestrides(size, strides), offset, op)
+    parent′ = _normalizeparent(parent)
+    strides′ = _normalizestrides(size, strides)
+    return StridedView{T,N,typeof(parent′),F}(parent′, size, strides′, offset, op)
 end
 
 StridedView(a::StridedView) = a
diff --git a/test/runtests.jl b/test/runtests.jl
index 5f3d284..c8d7791 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -13,7 +13,7 @@ Random.seed!(1234)
     @test isstrided(A1)
     @test isstrided(B1)
     @test C1 === B1
-    @test parent(B1) === A1
+    @test parent(B1) == reshape(A1, :)
     @test Base.elsize(B1) == Base.elsize(A1)
     for op1 in (identity, conj, transpose, adjoint)
         if op1 == transpose || op1 == adjoint
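
For context, a minimal usage sketch (not part of the patch) of the behaviour the README "News" entry describes and the updated test asserts: the view still indexes and sizes like the original array, while its parent is normalized to the flat storage, so identity with the original array no longer holds but elementwise equality with a vector reshape does. The variable names `A` and `B` below are illustrative only.

```julia
using StridedViews

A = randn(3, 4, 5)
B = StridedView(A)

# Indexing and size are unaffected by the parent normalization.
@assert B[2, 3, 4] == A[2, 3, 4]
@assert size(B) == size(A)

# The parent is no longer `A` itself: it is the flattened storage
# (the backing `Memory` on Julia v1.11+, a vector reshape otherwise),
# which is why the test now uses `==` instead of `===`.
@assert parent(B) !== A
@assert parent(B) == reshape(A, :)
```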