
Commit

Johannes Nägele committed Oct 24, 2023
Merge commit 29accd3 (2 parents: c2c0298 + 0d242d2)
Showing 6 changed files with 10 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/CI.yml
@@ -18,7 +18,7 @@ jobs:
       fail-fast: false
       matrix:
         version:
-          - '1.0'
+          - '1.6'
           - '1.9'
           - 'nightly'
         os:
5 changes: 4 additions & 1 deletion Project.toml
@@ -9,7 +9,10 @@ MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458"
 Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
 
 [compat]
-julia = "1"
+Plots = "1"
+MLDatasets = "0.7"
+BenchmarkTools = "1"
+julia = "1.6"
 
 [extras]
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
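With this change the supported lower bound moves from Julia 1.0 to Julia 1.6. In Pkg's compat notation an entry such as julia = "1.6" is a caret specifier, admitting any version at or above 1.6.0 and below 2.0.0, which matches the oldest Julia version the CI matrix above now exercises.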
2 changes: 1 addition & 1 deletion src/Chain.jl
@@ -19,5 +19,5 @@ This function return the parameters of our layers.
 However, it might make sense to change this implementation with the introduction of autodiff.
 """
 function params(chain::Chain{T}) where T
-    return [params(layer) for layer in chain.layers]
+    return vcat([params(layer) for layer in chain.layers]...)
 end
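A minimal sketch of what the params change does, using stand-in data rather than the package's own layer types: splatting the per-layer parameter vectors into vcat produces one flat vector of arrays instead of a vector of vectors.

# Hypothetical per-layer parameter lists, standing in for params(layer):
layer_params = [[rand(3, 2), rand(3)], [rand(1, 3), rand(1)]]

nested = [p for p in layer_params]           # old behaviour: Vector holding 2 Vectors
flat   = vcat([p for p in layer_params]...)  # new behaviour: flat Vector of 4 arrays

@assert length(nested) == 2
@assert length(flat) == 4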
3 changes: 2 additions & 1 deletion src/Metrics.jl
@@ -1,4 +1,5 @@
 abstract type AbstractHook end
 abstract type AbstractStage end
 
-# TODO: define hooks and stages
+# TODO: define hooks and stages
+# example commit
4 changes: 1 addition & 3 deletions src/Optimizer.jl
@@ -7,8 +7,6 @@ The type/layout of params might get changed in the future.
 """
 function update!(opt::BoringOptimizer, params, grad)
     for i in eachindex(params)
-        for j in eachindex(params[i])
-            params[i][j] .-= opt.learning_rate .* grad[i][j]
-        end
+        params[i] .-= opt.learning_rate .* grad[i]
     end
 end
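A rough sketch of the simplified update, assuming params and grad are flat vectors of arrays (as produced by the flattened params above); one broadcast per array replaces the former element-wise inner loop.

learning_rate = 0.1                        # stands in for opt.learning_rate
params = [rand(3, 2), rand(3)]
grad   = [0.5 .* ones(3, 2), 0.5 .* ones(3)]

for i in eachindex(params)
    params[i] .-= learning_rate .* grad[i]  # in-place, element-wise update of each array
end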
2 changes: 1 addition & 1 deletion src/Train.jl
@@ -13,7 +13,7 @@ function gradient_hardcoded(chain::Chain, loss, label, input::Vector{Vector{T}},
         layer = chain.layers[i]
         if layer isa AbstractLayer
             δ .= δ .* layer.activation.(input[i + 1], derivative=true)
-            gs[i] = [δ * input[i]', δ]
+            gs[2 * i - 1], gs[2 * i] = δ * input[i]', δ
             δ = layer.weights' * δ
         else # if it is softmax: do nothing
             error_msg = "This backpropagation method works only for a specific configuration."
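A hedged illustration of the new gradient layout, with hypothetical names and sizes: instead of gs[i] holding a two-element vector [∇W, ∇b] for layer i, the flat vector now stores the weight gradient at index 2i - 1 and the bias gradient at index 2i, mirroring the flattened parameter list returned by params.

n_layers = 2
gs = Vector{Any}(undef, 2 * n_layers)

δ = rand(3)          # error signal for layer i (hypothetical size)
x = rand(2)          # stands in for input[i]
i = 1

gs[2 * i - 1], gs[2 * i] = δ * x', δ   # 3×2 weight gradient, length-3 bias gradient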
