Warn about missing libraries.
maleadt committed Aug 20, 2019
1 parent c69f1e9 · commit 7862981
Showing 7 changed files with 57 additions and 66 deletions.
src/CuArrays.jl (3 additions & 2 deletions)
@@ -37,6 +37,7 @@ let
             Base.include_dependency(path)
         end
         @eval global const $lib = $path
+        @eval macro $lib() $lib === nothing ? error($"Your installation does not provide $lib, CuArrays.$(uppercase(name)) is unavailable") : $lib end
     end
 end
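
For context (an editor's note, not part of the diff): when a library was not found, the generated macro raises the error as soon as it is expanded; otherwise it returns the discovered library path, which gets spliced into the call site as a constant. A minimal standalone sketch of the same pattern, using a hypothetical libfoo rather than the actual CuArrays sources:

    # Sketch only: hypothetical "libfoo"; discovery would set this const to a path string.
    const libfoo = nothing

    macro libfoo()
        # Runs at macro-expansion time: complain if the library was never found,
        # otherwise return the path so it is spliced in as a constant.
        libfoo === nothing && error("Your installation does not provide libfoo")
        libfoo
    end

    # With libfoo === nothing, any use of @libfoo raises:
    #   ERROR: Your installation does not provide libfoo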

@@ -65,7 +66,7 @@
 include("sparse/CUSPARSE.jl")
 include("solver/CUSOLVER.jl")
 include("fft/CUFFT.jl")
 include("rand/CURAND.jl")
-libcudnn !== nothing && include("dnn/CUDNN.jl")
+include("dnn/CUDNN.jl")

 include("nnlib.jl")

@@ -89,7 +90,7 @@ function __init__()
         CUSOLVER._sparse_handle[] = C_NULL
         CUSPARSE._handle[] = C_NULL
         CURAND._generator[] = nothing
-        isdefined(CuArrays, :CUDNN) && (CUDNN._handle[] = C_NULL)
+        CUDNN._handle[] = C_NULL
     end
     push!(CUDAnative.device!_listeners, callback)

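For context: this callback runs whenever CUDAnative switches devices, and resetting each handle to C_NULL lets every sublibrary lazily recreate its handle on the new device. A rough sketch of that lazy-handle pattern (create_handle is hypothetical, not the actual source):

    # Sketch only: lazily (re)create a library handle after a device switch.
    const _handle = Ref{Ptr{Cvoid}}(C_NULL)

    function handle()
        if _handle[] == C_NULL
            _handle[] = create_handle()  # hypothetical, e.g. a cudnnCreate wrapper
        end
        return _handle[]
    end
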
src/dnn/CUDNN.jl (3 additions & 2 deletions)
@@ -5,13 +5,14 @@ import CUDAapi
 import CUDAdrv: CUDAdrv, CuContext, CuPtr, CU_NULL

 using ..CuArrays
-using ..CuArrays: libcudnn, active_context, unsafe_free!
+using ..CuArrays: @libcudnn, active_context, unsafe_free!
 using ..CuArrays: CuVecOrMat, CuVector

 using NNlib
 import NNlib: conv!, ∇conv_filter!, ∇conv_data!, stride, dilation, flipkernel,
               maxpool!, meanpool!, ∇maxpool!, ∇meanpool!, spatial_dims, padding, kernel_size,
               softmax, softmax!, ∇softmax!, logsoftmax, logsoftmax!, ∇logsoftmax
+using CUDAnative

 include("libcudnn_types.jl")
 include("error.jl")

src/dnn/libcudnn.jl (44 additions & 44 deletions)

Large diffs are not rendered by default.
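
This diff is not rendered, but given the symmetric 44/44 line count and the @libcudnn import above, it is presumably a mechanical replacement of the libcudnn constant with the checking macro at each ccall site. An illustrative hunk, not the verbatim diff, assuming the wrappers use the package's @check macro:

    -    @check ccall((:cudnnCreate, libcudnn), cudnnStatus_t, (Ptr{cudnnHandle_t},), handle)
    +    @check ccall((:cudnnCreate, @libcudnn), cudnnStatus_t, (Ptr{cudnnHandle_t},), handle)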

src/dnn/nnlib.jl (2 additions & 2 deletions)
@@ -1,8 +1,8 @@
-import ..CuArrays: CuVecOrMat, CuVector
-
 import NNlib: conv!, ∇conv_filter!, ∇conv_data!,
               maxpool!, meanpool!, ∇maxpool!, ∇meanpool!,
               softmax, softmax!, ∇softmax!, logsoftmax, logsoftmax!, ∇logsoftmax
+import ..CuArrays: CuVecOrMat, CuVector
+using CUDAnative


 # Softmax
src/nnlib.jl (0 additions & 12 deletions)
@@ -32,15 +32,3 @@ end
 end

 @cufunc softplus(x) = log1p(exp(x))
-
-if !@isdefined CUDNN
-    function conv!(y::CuArray, x::CuArray, w::CuArray; kw...)
-        error("CUDNN is not installed.")
-    end
-    function softmax!(out::CuVecOrMat, xs::CuVecOrMat)
-        error("CUDNN is not installed.")
-    end
-    function logsoftmax!(out::CuVecOrMat, xs::CuVecOrMat)
-        error("CUDNN is not installed.")
-    end
-end
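
These fallback stubs are now redundant: dnn/CUDNN.jl is included unconditionally, so the real conv!, softmax! and logsoftmax! methods are always defined, and an installation without the library instead fails through @libcudnn with the more precise message quoted above ("Your installation does not provide libcudnn, CuArrays.CUDNN is unavailable").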
test/base.jl (1 addition & 1 deletion)
@@ -121,7 +121,7 @@ end
 @test testf(x -> log.(x), rand(3,3))
 @test testf((x,xs) -> log.(x.+xs), Ref(1), rand(3,3))

-if isdefined(CuArrays, :CUDNN)
+if CuArrays.libcudnn !== nothing
     using NNlib

     @test testf(x -> logσ.(x), rand(5))
test/dnn.jl (4 additions & 3 deletions)
@@ -1,9 +1,10 @@
 @testset "CUDNN" begin

-if !isdefined(CuArrays, :CUDNN)
+using CuArrays.CUDNN
+
+if CuArrays.libcudnn === nothing
     @warn "Not testing CUDNN"
 else
-    using CuArrays.CUDNN
     @info "Testing CUDNN $(CUDNN.version())"

     @testset "NNlib" begin

@@ -52,7 +53,7 @@ using CuArrays.CUDNN
 @test testf((dy, y, x) -> ∇maxpool(dy, y, x, pdims), dy, y, x)
 @test testf(x -> maxpool(x, pdims), x)
 @test testf((dy, y, x) -> ∇maxpool(dy, y, x, pdims), dy, y, x)

 # CPU implementation of ∇conv_bias!
 db = zeros(Float64, 1, 1, 3, 1)
 function CuArrays.CUDNN.∇conv_bias!(db, y)
