Skip to content

Use 5-argument mul! instead of mulαβ! #231

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits on Dec 23, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ os:
- linux
- osx
julia:
- 1.2
- 1.3
codecov: true
notifications:
Expand All @@ -15,7 +14,7 @@ jobs:
include:
- stage: "Documentation"
os: linux
julia: 1.2
julia: 1.3
script:
- julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd()));
Pkg.instantiate()'
Expand Down
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ StatsFuns = "0.8, 0.9"
StatsModels = "0.6"
Tables = "0.2"
TypedTables = "1"
julia = "1.2"
julia = "1.3"

[extras]
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Expand Down
5 changes: 1 addition & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

|**Documentation**|**Citation**|**Build Status**|**Code Coverage**|
|:-:|:-:|:-:|:-:|
|[![][docs-stable-img]][docs-stable-url] [![][docs-latest-img]][docs-latest-url] | [![][doi-img]][doi-url] | [![][travis-img]][travis-url] [![][appveyor-img]][appveyor-url] | [![][coveralls-img]][coveralls-url] [![][codecov-img]][codecov-url]|
|[![][docs-stable-img]][docs-stable-url] [![][docs-latest-img]][docs-latest-url] | [![][doi-img]][doi-url] | [![][travis-img]][travis-url] [![][appveyor-img]][appveyor-url] | [![][codecov-img]][codecov-url]|

[doi-img]: https://zenodo.org/badge/9106942.svg
[doi-url]: https://zenodo.org/badge/latestdoi/9106942
Expand All @@ -19,9 +19,6 @@
[appveyor-img]: https://ci.appveyor.com/api/projects/status/github/JuliaStats/MixedModels.jl?svg=true
[appveyor-url]: https://ci.appveyor.com/project/JuliaStats/mixedmodels-jl

[coveralls-img]: https://coveralls.io/repos/github/JuliaStats/MixedModels.jl/badge.svg?branch=master
[coveralls-url]: https://coveralls.io/github/JuliaStats/MixedModels.jl?branch=master

[codecov-img]: https://codecov.io/github/JuliaStats/MixedModels.jl/badge.svg?branch=master
[codecov-url]: https://codecov.io/github/JuliaStats/MixedModels.jl?branch=master

Expand Down
2 changes: 1 addition & 1 deletion appveyor.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
environment:
matrix:
- julia_version: 1.2
- julia_version: 1.3
- julia_version: nightly

platform:
Expand Down
1 change: 0 additions & 1 deletion src/MixedModels.jl
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,6 @@ include("randomeffectsterm.jl")
include("linearmixedmodel.jl")
include("gausshermite.jl")
include("generalizedlinearmixedmodel.jl")
include("mixed.jl")
include("linalg/statschol.jl")
include("linalg/cholUnblocked.jl")
include("linalg/rankUpdate.jl")
Expand Down
34 changes: 19 additions & 15 deletions src/arraytypes.jl
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
using StaticArrays, SparseArrays, LinearAlgebra

"""
UniformBlockDiagonal{T}

Homogeneous block diagonal matrices. `k` diagonal blocks each of size `m×m`
"""
struct UniformBlockDiagonal{T} <: AbstractMatrix{T}
data::Array{T, 3}
data::Array{T,3}
facevec::Vector{SubArray{T,2,Array{T,3}}}
end

function UniformBlockDiagonal(dat::Array{T,3}) where {T}
UniformBlockDiagonal(dat,
SubArray{T,2,Array{T,3}}[view(dat,:,:,i) for i in 1:size(dat, 3)])
UniformBlockDiagonal(
dat,
SubArray{T,2,Array{T,3}}[view(dat, :, :, i) for i = 1:size(dat, 3)],
)
end

function Base.copyto!(dest::UniformBlockDiagonal{T}, src::UniformBlockDiagonal{T}) where{T}
function Base.copyto!(dest::UniformBlockDiagonal{T}, src::UniformBlockDiagonal{T}) where {T}
sdat = src.data
ddat = dest.data
size(ddat) == size(sdat) || throw(DimensionMismatch(""))
Expand All @@ -28,13 +28,13 @@ function Base.copyto!(dest::Matrix{T}, src::UniformBlockDiagonal{T}) where {T}
fill!(dest, zero(T))
sdat = src.data
m, n, l = size(sdat)
for k in 1:l
for k = 1:l
ioffset = (k - 1) * m
joffset = (k - 1) * n
for j in 1:n
for j = 1:n
jind = joffset + j
for i in 1:m
dest[ioffset + i, jind] = sdat[i,j,k]
for i = 1:m
dest[ioffset+i, jind] = sdat[i, j, k]
end
end
end
Expand All @@ -45,7 +45,7 @@ function Base.getindex(A::UniformBlockDiagonal{T}, i::Int, j::Int) where {T}
Ad = A.data
m, n, l = size(Ad)
(0 < i ≤ l * m && 0 < j ≤ l * n) ||
throw(IndexError("attempt to access $(l*m) × $(l*n) array at index [$i, $j]"))
throw(IndexError("attempt to access $(l*m) × $(l*n) array at index [$i, $j]"))
iblk, ioffset = divrem(i - 1, m)
jblk, joffset = divrem(j - 1, n)
iblk == jblk ? Ad[ioffset+1, joffset+1, iblk+1] : zero(T)
Expand All @@ -54,15 +54,15 @@ end
function LinearAlgebra.Matrix(A::UniformBlockDiagonal{T}) where {T}
Ad = A.data
m, n, l = size(Ad)
mat = zeros(T, (m*l, n*l))
mat = zeros(T, (m * l, n * l))
@inbounds for k = 0:(l-1)
kp1 = k + 1
km = k * m
kn = k * n
for j = 1:n
knpj = kn + j
for i = 1:m
mat[km + i, knpj] = Ad[i, j, kp1]
mat[km+i, knpj] = Ad[i, j, kp1]
end
end
end
Expand All @@ -81,7 +81,10 @@ A `SparseMatrixCSC` whose nonzeros form blocks of rows or columns or both.

# Members
* `cscmat`: `SparseMatrixCSC{Tv, Int32}` representation for general calculations
* `blkpattern`: `SparseMatrixCSC{Bool,Int32}` pattern of blocks of size (S,P)
* `nzasmat`: nonzeros of `cscmat` as a dense matrix
* `colblkptr`: pattern of blocks of columns

These are only created as products of `ReMat`s.
"""
mutable struct BlockedSparse{T,S,P} <: AbstractMatrix{T}
cscmat::SparseMatrixCSC{T,Int32}
Expand All @@ -102,7 +105,8 @@ SparseArrays.sparse(A::BlockedSparse) = A.cscmat
SparseArrays.nnz(A::BlockedSparse) = nnz(A.cscmat)

function Base.copyto!(L::BlockedSparse{T}, A::SparseMatrixCSC{T}) where {T}
size(L) == size(A) && nnz(L) == nnz(A) || throw(DimensionMismatch("size(L) ≠ size(A) or nnz(L) ≠ nnz(A"))
size(L) == size(A) && nnz(L) == nnz(A) ||
        throw(DimensionMismatch("size(L) ≠ size(A) or nnz(L) ≠ nnz(A)"))
copyto!(nonzeros(L.cscmat), nonzeros(A))
L
end
2 changes: 1 addition & 1 deletion src/femat.jl
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ Base.size(A::FeMat, i) = size(A.wtx, i)
Base.copyto!(A::FeMat{T}, src::AbstractVecOrMat{T}) where {T} = copyto!(A.x, src)

*(adjA::Adjoint{T,<:FeMat{T}}, B::FeMat{T}) where {T} =
fullrankwtx(adjA.parent)'fullrankwtx(B)
fullrankwtx(adjA.parent)' * fullrankwtx(B)

LinearAlgebra.mul!(R::StridedVecOrMat{T}, A::FeMat{T}, B::StridedVecOrMat{T}) where {T} =
mul!(R, A.x, B)
Expand Down
34 changes: 19 additions & 15 deletions src/gausshermite.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
using StaticArrays, LinearAlgebra

"""
GaussHermiteQuadrature

Expand Down Expand Up @@ -28,8 +26,8 @@ gn5 = GHnorm(5)
sum(@. abs2(σ*gn5.z + μ)*gn5.w) # E[X^2] where X ∼ N(μ, σ)
```

For evaluation of the log-likelihood of a GLMM the integral to evaluate for each level of the grouping
factor is approximately Gaussian shaped.
For evaluation of the log-likelihood of a GLMM the integral to evaluate for each level of
the grouping factor is approximately Gaussian shaped.
"""
GaussHermiteQuadrature
"""
Expand All @@ -40,18 +38,20 @@ A struct with 2 SVector{K,Float64} members
- `wt`: Gauss-Hermite weights normalized to sum to unity
"""
struct GaussHermiteNormalized{K}
z::SVector{K, Float64}
z::SVector{K,Float64}
w::SVector{K,Float64}
end
function GaussHermiteNormalized(k::Integer)
ev = eigen(SymTridiagonal(zeros(k), sqrt.(1:k-1)))
w = abs2.(ev.vectors[1,:])
w = abs2.(ev.vectors[1, :])
GaussHermiteNormalized(
SVector{k}((ev.values .- reverse(ev.values)) ./ 2),
SVector{k}(LinearAlgebra.normalize((w .+ reverse(w)) ./ 2, 1)))
SVector{k}(LinearAlgebra.normalize((w .+ reverse(w)) ./ 2, 1)),
)
end

Base.iterate(g::GaussHermiteNormalized{K}, i=1) where {K} = (K < i ? nothing : ((z = g.z[i], w = g.w[i]), i + 1))
Base.iterate(g::GaussHermiteNormalized{K}, i = 1) where {K} =
(K < i ? nothing : ((z = g.z[i], w = g.w[i]), i + 1))

Base.length(g::GaussHermiteNormalized{K}) where {K} = K

Expand All @@ -61,10 +61,13 @@ Base.length(g::GaussHermiteNormalized{K}) where {K} = K
Memoized values of `GHnorm`{@ref} stored as a `Dict{Int,GaussHermiteNormalized}`
"""
const GHnormd = Dict{Int,GaussHermiteNormalized}(
1 => GaussHermiteNormalized(SVector{1}(0.),SVector{1}(1.)),
2 => GaussHermiteNormalized(SVector{2}(-1.0,1.0),SVector{2}(0.5,0.5)),
3 => GaussHermiteNormalized(SVector{3}(-sqrt(3),0.,sqrt(3)),SVector{3}(1/6,2/3,1/6))
)
1 => GaussHermiteNormalized(SVector{1}(0.0), SVector{1}(1.0)),
2 => GaussHermiteNormalized(SVector{2}(-1.0, 1.0), SVector{2}(0.5, 0.5)),
3 => GaussHermiteNormalized(
SVector{3}(-sqrt(3), 0.0, sqrt(3)),
SVector{3}(1 / 6, 2 / 3, 1 / 6),
),
)

"""
GHnorm(k::Int)
Expand All @@ -74,7 +77,8 @@ Return the (unique) GaussHermiteNormalized{k} object.
The function values are stored (memoized) when first evaluated. Subsequent evaluations
for the same `k` have very low overhead.
"""
GHnorm(k::Int) = get!(GHnormd, k) do
GaussHermiteNormalized(k)
end
GHnorm(k::Int) =
get!(GHnormd, k) do
GaussHermiteNormalized(k)
end
GHnorm(k) = GHnorm(Int(k))
Loading