Skip to content

Commit

Permalink
add missing file
Browse files Browse the repository at this point in the history
  • Loading branch information
Red-Portal committed Aug 10, 2024
1 parent 276b215 commit 1d79817
Showing 1 changed file with 86 additions and 0 deletions.
86 changes: 86 additions & 0 deletions src/optimization/optrules.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@

"""
    DoWG(repsilon = 1e-8)

[DoWG](https://arxiv.org/abs/2305.16284) optimizer. Its only parameter is the
initial guess of the Euclidean distance to the optimum, `repsilon`.
The [DoG](https://arxiv.org/abs/2302.12022) paper recommends `1e-4*(1 + norm(x0))`.

# Parameters
- `repsilon`: Initial guess of the Euclidean distance between the initial point
  and the optimum. (Default: `1e-8`.)
"""
Optimisers.@def struct DoWG <: Optimisers.AbstractRule
    repsilon = 1e-8
end

# State tuple: (anchor point `x0`, accumulated weighted gradient energy `v`,
# running distance estimate `r`, seeded with the user-supplied `repsilon`).
function Optimisers.init(o::DoWG, x::AbstractArray{T}) where {T}
    return (copy(x), zero(T), T(o.repsilon))
end

Check warning on line 17 in src/optimization/optrules.jl

View check run for this annotation

Codecov / codecov/patch

src/optimization/optrules.jl#L17

Added line #L17 was not covered by tests

# DoWG update: the step size is r²/√v, where r is a monotone estimate of the
# distance travelled from the anchor x0 and v accumulates r²-weighted squared
# gradient norms.
function Optimisers.apply!(::DoWG, state, x::AbstractArray{T}, dx) where {T}
    anchor, energy, dist = state

    # Distance estimate never decreases.
    dist = max(dist, sqrt(sum(abs2, x - anchor)))
    dist2 = dist * dist

    # Accumulate distance-weighted gradient energy.
    energy += dist2 * sum(abs2, dx)

    stepsize = dist2 / sqrt(energy)
    scaled = Optimisers.@lazy dx * stepsize
    return (anchor, energy, dist), scaled
end

"""
    DoG(repsilon = 1e-8)

[DoG](https://arxiv.org/abs/2302.12022) optimizer. Its only parameter is the
initial guess of the Euclidean distance to the optimum, `repsilon`.
The [DoG](https://arxiv.org/abs/2302.12022) paper recommends `1e-4*(1 + norm(x0))`.

# Parameters
- `repsilon`: Initial guess of the Euclidean distance between the initial point
  and the optimum. (Default: `1e-8`.)
"""
Optimisers.@def struct DoG <: Optimisers.AbstractRule
    repsilon = 1e-8
end

# State tuple: (anchor point `x0`, accumulated squared gradient norms `v`,
# running distance estimate `r`, seeded with the user-supplied `repsilon`).
function Optimisers.init(o::DoG, x::AbstractArray{T}) where {T}
    return (copy(x), zero(T), T(o.repsilon))
end

Check warning on line 46 in src/optimization/optrules.jl

View check run for this annotation

Codecov / codecov/patch

src/optimization/optrules.jl#L46

Added line #L46 was not covered by tests

# DoG update: the step size is r/√v, where r is a monotone estimate of the
# distance travelled from the anchor x0 and v accumulates squared gradient
# norms (unweighted, unlike DoWG).
function Optimisers.apply!(::DoG, state, x::AbstractArray{T}, dx) where {T}
    anchor, energy, dist = state

    # Distance estimate never decreases.
    dist = max(dist, sqrt(sum(abs2, x - anchor)))

    # Accumulate squared gradient norms.
    energy += sum(abs2, dx)

    stepsize = dist / sqrt(energy)
    scaled = Optimisers.@lazy dx * stepsize
    return (anchor, energy, dist), scaled
end

"""
    COCOB(α = 100)

[Continuous Coin Betting](https://arxiv.org/abs/1705.07795) optimizer.
Its only parameter is the maximum change per parameter `α`, which shouldn't
need much tuning. The paper suggests `α = 100` as a generally good default.

# Parameters
- `alpha` (`α`): Scaling parameter. (Default: `100`.)
"""
Optimisers.@def struct COCOB <: Optimisers.AbstractRule
    alpha = 100
end

# State tuple: four per-coordinate accumulators (L, G, R, θ) initialized to
# zero, plus `x1`, a copy of the initial point — see `apply!` for their roles.
function Optimisers.init(::COCOB, x::AbstractArray{T}) where {T}
    accumulators = ntuple(_ -> zero(x), 4)
    return (accumulators..., copy(x))
end

# COCOB-Backprop update (Orabona & Tommasi, 2017, Algorithm 2). All state
# arrays are updated in place per coordinate.
function Optimisers.apply!(o::COCOB, state, x::AbstractArray{T}, dx) where {T}
    α = T(o.alpha)
    L, G, R, θ, x1 = state

    # L: maximum observed gradient magnitude per coordinate.
    Optimisers.@.. L = max(L, abs(dx))
    # G: sum of gradient magnitudes per coordinate.
    Optimisers.@.. G = G + abs(dx)
    # R: accumulated "reward" of the bets, clipped below at zero.
    Optimisers.@.. R = max(R + (x - x1) * -dx, 0)
    # θ: sum of negative gradients (the betting direction).
    Optimisers.@.. θ = θ + -dx
    # Bet the fraction θ / (L * max(G + L, α * L)) of the current wealth
    # (L + R), measured from the anchor x1. The result is the quantity the
    # caller subtracts from x. (Original line was garbled: it read `-/` and
    # dropped the numerator θ, which is a syntax error.)
    dx′ = Optimisers.@lazy -(x1 - x) - (θ / (L * max(G + L, α * L)) * (L + R))
    return (L, G, R, θ, x1), dx′
end

0 comments on commit 1d79817

Please sign in to comment.