diff --git a/src/architecture.jl b/src/architecture.jl index 7521c6bea..6df78881c 100644 --- a/src/architecture.jl +++ b/src/architecture.jl @@ -64,7 +64,6 @@ virtual_get_device(::VirtualSerial) = VirtualCPU(nothing, 1) virtual_get_task(::VirtualSerial) = nothing - struct CPUThread{Parent} <: AbstractTask tid::Int dev::CPU @@ -157,5 +156,6 @@ function moveto(vec::Vector, task::CPUThread) end function moveto(vec::CPULocalVector, task::CPUThread) - return vec.data[task.tid] + temp = vec.data[task.tid] + return temp end diff --git a/src/interface/abstractarrays.jl b/src/interface/abstractarrays.jl index a614c9ba0..5fa2a3e44 100644 --- a/src/interface/abstractarrays.jl +++ b/src/interface/abstractarrays.jl @@ -115,4 +115,4 @@ Base.setindex!(arr::AsArray{T, N}, v, i::Vararg{Int, N}) where {T, N} = arr.fbr[ Base.setindex!(arr::AsArray{T, N}, v, i::Vararg{Any, N}) where {T, N} = arr.fbr[i...] = v is_injective(ctx, tns::VirtualAbstractArray) = [true for _ in tns.ndims] -is_atomic(ctx, tns::VirtualAbstractArray) = true \ No newline at end of file +is_atomic(ctx, tns::VirtualAbstractArray) = ([false, [false for _ in tns.ndims]...], false) diff --git a/src/looplets/unfurl.jl b/src/looplets/unfurl.jl index e618926b8..8bc446162 100644 --- a/src/looplets/unfurl.jl +++ b/src/looplets/unfurl.jl @@ -26,4 +26,7 @@ function unfurl(ctx, tns::Furlable, ext, mode, protos...) end unfurl(ctx, tns, ext, mode, protos...) = tns -instantiate(ctx, tns::Furlable, mode, protos) = tns \ No newline at end of file +instantiate(ctx, tns::Furlable, mode, protos) = tns +is_injective(ctx, tns::Furlable) = is_injective(ctx, tns.body) +is_atomic(ctx, tns::Furlable) = is_atomic(ctx, tns.body) +is_concurrent(ctx, tns::Furlable) = is_concurrent(ctx, tns.body) \ No newline at end of file diff --git a/src/tensors/combinators/offset.jl b/src/tensors/combinators/offset.jl index bdeec466c..4bc6ec119 100644 --- a/src/tensors/combinators/offset.jl +++ b/src/tensors/combinators/offset.jl @@ -17,6 +17,7 @@ end is_injective(ctx, lvl::VirtualOffsetArray) = is_injective(ctx, lvl.body) is_atomic(ctx, lvl::VirtualOffsetArray) = is_atomic(ctx, lvl.body) +is_concurrent(ctx, lvl::VirtualOffsetArray) = is_concurrent(ctx, lvl.body) Base.show(io::IO, ex::VirtualOffsetArray) = Base.show(io, MIME"text/plain"(), ex) function Base.show(io::IO, mime::MIME"text/plain", ex::VirtualOffsetArray) diff --git a/src/tensors/combinators/permissive.jl b/src/tensors/combinators/permissive.jl index d800fc3e5..59978e167 100644 --- a/src/tensors/combinators/permissive.jl +++ b/src/tensors/combinators/permissive.jl @@ -19,6 +19,8 @@ end is_injective(ctx, lvl::VirtualPermissiveArray) = is_injective(ctx, lvl.body) is_atomic(ctx, lvl::VirtualPermissiveArray) = is_atomic(ctx, lvl.body) +is_concurrent(ctx, lvl::VirtualPermissiveArray) = is_concurrent(ctx, lvl.body) + Base.show(io::IO, ex::VirtualPermissiveArray) = Base.show(io, MIME"text/plain"(), ex) function Base.show(io::IO, mime::MIME"text/plain", ex::VirtualPermissiveArray) diff --git a/src/tensors/combinators/product.jl b/src/tensors/combinators/product.jl index d8badac20..b3119a45a 100644 --- a/src/tensors/combinators/product.jl +++ b/src/tensors/combinators/product.jl @@ -25,7 +25,10 @@ function is_concurrent(ctx, lvl::VirtualProductArray) sub = is_concurrent(ctx, lvl.body) return [sub[1:lvl.dim]..., false, sub[lvl.dim + 1:end]...]
end -is_atomic(ctx, lvl::VirtualProductArray) = is_atomic(ctx, lvl.body) +function is_atomic(ctx, lvl::VirtualProductArray) + (below, overall) = is_atomic(ctx, lvl.body) + return ([below[1:lvl.dim]..., below[lvl.dim] && below[lvl.dim + 1], below[lvl.dim + 1:end]...], overall) +end Base.show(io::IO, ex::VirtualProductArray) = Base.show(io, MIME"text/plain"(), ex) function Base.show(io::IO, mime::MIME"text/plain", ex::VirtualProductArray) diff --git a/src/tensors/combinators/protocolized.jl b/src/tensors/combinators/protocolized.jl index e7ce79ad2..708b88dc0 100644 --- a/src/tensors/combinators/protocolized.jl +++ b/src/tensors/combinators/protocolized.jl @@ -17,6 +17,8 @@ end is_injective(ctx, lvl::VirtualProtocolizedArray) = is_injective(ctx, lvl.body) is_atomic(ctx, lvl::VirtualProtocolizedArray) = is_atomic(ctx, lvl.body) +is_concurrent(ctx, lvl::VirtualProtocolizedArray) = is_concurrent(ctx, lvl.body) + Base.:(==)(a::VirtualProtocolizedArray, b::VirtualProtocolizedArray) = a.body == b.body && a.protos == b.protos diff --git a/src/tensors/combinators/roots.jl b/src/tensors/combinators/roots.jl index dcc809efe..9b8b8d9ad 100644 --- a/src/tensors/combinators/roots.jl +++ b/src/tensors/combinators/roots.jl @@ -30,6 +30,7 @@ lower_access(ctx::AbstractCompiler, node, tns::FinchNode) = is_injective(ctx, lvl::FinchNode) = is_injective(ctx, resolve(ctx, lvl)) is_atomic(ctx, lvl::FinchNode) = is_atomic(ctx, resolve(ctx, lvl)) +is_concurrent(ctx, lvl::FinchNode) = is_concurrent(ctx, resolve(ctx, lvl)) function getroot(node::FinchNode) if node.kind === virtual diff --git a/src/tensors/combinators/scale.jl b/src/tensors/combinators/scale.jl index a78e9ece3..a6cfcda36 100644 --- a/src/tensors/combinators/scale.jl +++ b/src/tensors/combinators/scale.jl @@ -17,6 +17,8 @@ end is_injective(ctx, lvl::VirtualScaleArray) = is_injective(ctx, lvl.body) is_atomic(ctx, lvl::VirtualScaleArray) = is_atomic(ctx, lvl.body) +is_concurrent(ctx, lvl::VirtualScaleArray) = is_concurrent(ctx, lvl.body) + Base.show(io::IO, ex::VirtualScaleArray) = Base.show(io, MIME"text/plain"(), ex) function Base.show(io::IO, mime::MIME"text/plain", ex::VirtualScaleArray) diff --git a/src/tensors/combinators/toeplitz.jl b/src/tensors/combinators/toeplitz.jl index 1dff1bd06..4c061ac9a 100644 --- a/src/tensors/combinators/toeplitz.jl +++ b/src/tensors/combinators/toeplitz.jl @@ -28,7 +28,15 @@ function is_injective(ctx, lvl::VirtualToeplitzArray) sub = is_injective(ctx, lvl.body) return [sub[1:lvl.dim]..., false, sub[lvl.dim + 1:end]...] end -is_atomic(ctx, lvl::VirtualToeplitzArray) = is_atomic(ctx, lvl.body) +function is_atomic(ctx, lvl::VirtualToeplitzArray) + (below, overall) = is_atomic(ctx, lvl.body) + newBelow = [below[1:lvl.dim]..., below[lvl.dim] && below[lvl.dim + 1], below[lvl.dim + 1:end]...] + return (newBelow, overall) +end +function is_concurrent(ctx, lvl::VirtualToeplitzArray) + sub = is_concurrent(ctx, lvl.body) + return [sub[1:lvl.dim]..., false, sub[lvl.dim + 1:end]...]
+end Base.show(io::IO, ex::VirtualToeplitzArray) = Base.show(io, MIME"text/plain"(), ex) function Base.show(io::IO, mime::MIME"text/plain", ex::VirtualToeplitzArray) diff --git a/src/tensors/combinators/unfurled.jl b/src/tensors/combinators/unfurled.jl index 9769c2c77..0927df776 100644 --- a/src/tensors/combinators/unfurled.jl +++ b/src/tensors/combinators/unfurled.jl @@ -124,6 +124,7 @@ getroot(tns::Unfurled) = getroot(tns.arr) is_injective(ctx, lvl::Unfurled) = is_injective(ctx, lvl.arr) is_atomic(ctx, lvl::Unfurled) = is_atomic(ctx, lvl.arr) +is_concurrent(ctx, lvl::Unfurled) = is_concurrent(ctx, lvl.arr) function lower_access(ctx::AbstractCompiler, node, tns::Unfurled) if !isempty(node.idxs) diff --git a/src/tensors/combinators/windowed.jl b/src/tensors/combinators/windowed.jl index 4fbc0d95f..435e0b9a4 100644 --- a/src/tensors/combinators/windowed.jl +++ b/src/tensors/combinators/windowed.jl @@ -17,6 +17,7 @@ end is_injective(ctx, lvl::VirtualWindowedArray) = is_injective(ctx, lvl.body) is_atomic(ctx, lvl::VirtualWindowedArray) = is_atomic(ctx, lvl.body) +is_concurrent(ctx, lvl::VirtualWindowedArray) = is_concurrent(ctx, lvl.body) Base.show(io::IO, ex::VirtualWindowedArray) = Base.show(io, MIME"text/plain"(), ex) function Base.show(io::IO, mime::MIME"text/plain", ex::VirtualWindowedArray) diff --git a/src/tensors/fibers.jl b/src/tensors/fibers.jl index 130d90a76..6ac3cf4e2 100644 --- a/src/tensors/fibers.jl +++ b/src/tensors/fibers.jl @@ -69,6 +69,8 @@ mutable struct VirtualFiber{Lvl} <: AbstractVirtualFiber{Lvl} end is_injective(ctx, tns::VirtualFiber) = is_level_injective(ctx, tns.lvl) +is_concurrent(ctx, tns::VirtualFiber) = is_level_concurrent(ctx, tns.lvl)[1] + is_atomic(ctx, tns::VirtualFiber) = is_level_atomic(ctx, tns.lvl) function virtualize(ctx, ex, ::Type{<:Tensor{Lvl}}, tag=freshen(ctx, :tns)) where {Lvl} diff --git a/src/tensors/levels/abstractlevel.jl b/src/tensors/levels/abstractlevel.jl index 18909a81c..fb2edbbf7 100644 --- a/src/tensors/levels/abstractlevel.jl +++ b/src/tensors/levels/abstractlevel.jl @@ -1,6 +1,9 @@ abstract type AbstractLevel end abstract type AbstractVirtualLevel end +virtual_level_ndims(ctx, lvl::AbstractVirtualLevel) = length(virtual_level_size(ctx, lvl)) + + #is_laminable_updater(lvl::AbstractVirtualLevel, ctx, ::Union{::typeof(defaultread), ::typeof(walk), ::typeof(gallop), ::typeof(follow), typeof(defaultupdate), typeof(laminate), typeof(extrude)}, protos...) = false #is_laminable_updater(lvl::AbstractVirtualLevel, ctx) = false diff --git a/src/tensors/levels/atomiclevels.jl b/src/tensors/levels/atomiclevels.jl index 98023131b..8c61a03b7 100644 --- a/src/tensors/levels/atomiclevels.jl +++ b/src/tensors/levels/atomiclevels.jl @@ -93,9 +93,16 @@ postype(lvl:: AtomicLevel) = postype(lvl.lvl) postype(lvl:: VirtualAtomicLevel) = postype(lvl.lvl) -is_level_injective(ctx, lvl::VirtualAtomicLevel) = [is_level_injective(ctx, lvl.lvl)..., true] -is_level_concurrent(ctx, lvl::VirtualAtomicLevel) = [is_level_concurrent(ctx, lvl.lvl)..., true] -is_level_atomic(ctx, lvl::VirtualAtomicLevel) = true +is_level_injective(ctx, lvl::VirtualAtomicLevel) = [is_level_injective(ctx, lvl.lvl)...]
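+# Note: AtomicLevel guards positions without adding an index dimension, so its sublevel's injectivity passes through unchanged; the guard itself is reported via is_level_atomic below.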
+function is_level_concurrent(ctx, lvl::VirtualAtomicLevel) + (below, c) = is_level_concurrent(ctx, lvl.lvl) + return (below, c) +end +function is_level_atomic(ctx, lvl::VirtualAtomicLevel) + (below, _) = is_level_atomic(ctx, lvl.lvl) + return (below, true) +end function lower(ctx::AbstractCompiler, lvl::VirtualAtomicLevel, ::DefaultStyle) quote @@ -118,7 +124,7 @@ end Base.summary(lvl::VirtualAtomicLevel) = "Atomic($(lvl.Lvl))" virtual_level_resize!(ctx, lvl::VirtualAtomicLevel, dims...) = (lvl.lvl = virtual_level_resize!(ctx, lvl.lvl, dims...); lvl) virtual_level_size(ctx, lvl::VirtualAtomicLevel) = virtual_level_size(ctx, lvl.lvl) -virtual_level_size(ctx, x) = error(string("Not defined for", x)) +virtual_level_ndims(ctx, lvl::VirtualAtomicLevel) = length(virtual_level_size(ctx, lvl.lvl)) virtual_level_eltype(lvl::VirtualAtomicLevel) = virtual_level_eltype(lvl.lvl) virtual_level_default(lvl::VirtualAtomicLevel) = virtual_level_default(lvl.lvl) @@ -203,17 +209,21 @@ function instantiate(ctx, fbr::VirtualSubFiber{VirtualAtomicLevel}, mode::Update lockVal = freshen(ctx.code, lvl.ex, :lockVal) dev = lower(ctx, virtual_get_device(ctx.code.task), DefaultStyle()) return Thunk( + + body = (ctx) -> begin preamble = quote $atomicData = get_lock($dev, $(lvl.locks), $(ctx(pos)), eltype($(lvl.AVal))) $lockVal = aquire_lock!($dev, $atomicData) - end, - body = (ctx) -> begin + end + epilogue = quote + release_lock!($dev, $atomicData) end + push!(ctx.code.preamble, preamble) + push!(ctx.code.epilogue, epilogue) lvl_2 = lvl.lvl update = instantiate(ctx, VirtualSubFiber(lvl_2, pos), mode, protos) return update end, - epilogue = quote - release_lock!($dev, $atomicData) end + ) end function instantiate(ctx, fbr::VirtualHollowSubFiber{VirtualAtomicLevel}, mode::Updater, protos) @@ -223,16 +233,19 @@ function instantiate(ctx, fbr::VirtualHollowSubFiber{VirtualAtomicLevel}, mode:: lockVal = freshen(ctx.code, lvl.ex, :lockVal) dev = lower(ctx, virtual_get_device(ctx.code.task), DefaultStyle()) return Thunk( + + body = (ctx) -> begin preamble = quote $atomicData = get_lock($dev, $(lvl.locks), $(ctx(pos)), eltype($(lvl.AVal))) $lockVal = aquire_lock!($dev, $atomicData) - end, - body = (ctx) -> begin + end + epilogue = quote + release_lock!($dev, $atomicData) end + push!(ctx.code.preamble, preamble) + push!(ctx.code.epilogue, epilogue) lvl_2 = lvl.lvl update = instantiate(ctx, VirtualHollowSubFiber(lvl_2, pos, fbr.dirty), mode, protos) return update - end, - epilogue = quote - release_lock!($dev, $atomicData) end + end ) end \ No newline at end of file diff --git a/src/tensors/levels/denselevels.jl b/src/tensors/levels/denselevels.jl index 26a977b60..e453b6f99 100644 --- a/src/tensors/levels/denselevels.jl +++ b/src/tensors/levels/denselevels.jl @@ -107,7 +107,14 @@ mutable struct VirtualDenseLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualDenseLevel) = [is_level_injective(ctx, lvl.lvl)..., true] -is_level_atomic(ctx, lvl::VirtualDenseLevel) = is_level_atomic(ctx, lvl.lvl) +function is_level_atomic(ctx, lvl::VirtualDenseLevel) + (data, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([data; atomic], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualDenseLevel) + (data, concurrent) = is_level_concurrent(ctx, lvl.lvl) + return ([data; concurrent], concurrent) +end function virtualize(ctx, ex, ::Type{DenseLevel{Ti, Lvl}}, tag=:lvl) where {Ti, Lvl} sym = freshen(ctx, tag)
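Aside (not part of the patch): a minimal sketch of the (below, overall) convention that the is_level_atomic/is_level_concurrent methods above now share. ToyElement, ToyDense, and atomic_info are illustrative stand-ins, not Finch names: the element level seeds the per-position vector, and each wrapper level appends one entry for the dimension it contributes, mirroring the ([data; atomic], atomic) pattern in denselevels.jl.

    # Toy model of the (below, overall) propagation pattern used in this patch.
    struct ToyElement end
    struct ToyDense{L}
        lvl::L
    end

    # The element level seeds `below` with a single entry for the values themselves.
    atomic_info(::ToyElement) = ([false], false)

    # Dense appends its sublevel's overall flag as the guard status of the
    # dimension it adds, and passes the overall flag through unchanged.
    function atomic_info(lvl::ToyDense)
        (below, atomic) = atomic_info(lvl.lvl)
        return ([below; atomic], atomic)
    end

    atomic_info(ToyDense(ToyDense(ToyElement())))  # -> ([false, false, false], false)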
diff --git a/src/tensors/levels/denserlelevels.jl b/src/tensors/levels/denserlelevels.jl index f5d3375ee..3eea56d3e 100644 --- a/src/tensors/levels/denserlelevels.jl +++ b/src/tensors/levels/denserlelevels.jl @@ -145,8 +145,14 @@ mutable struct VirtualDenseRLELevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualDenseRLELevel) = [false, is_level_injective(ctx, lvl.lvl)...] -is_level_concurrent(ctx, lvl::VirtualDenseRLELevel) = [false, is_level_concurrent(ctx, lvl.lvl)...] -is_level_atomic(ctx, lvl::VirtualDenseRLELevel) = false +function is_level_atomic(ctx, lvl::VirtualDenseRLELevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualDenseRLELevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end postype(lvl::VirtualDenseRLELevel) = postype(lvl.lvl) diff --git a/src/tensors/levels/elementlevels.jl b/src/tensors/levels/elementlevels.jl index f68f9c0e0..dbe1fe284 100644 --- a/src/tensors/levels/elementlevels.jl +++ b/src/tensors/levels/elementlevels.jl @@ -88,7 +88,10 @@ mutable struct VirtualElementLevel <: AbstractVirtualLevel end is_level_injective(ctx, ::VirtualElementLevel) = [] -is_level_atomic(ctx, lvl::VirtualElementLevel) = false +is_level_atomic(ctx, lvl::VirtualElementLevel) = ([false], false) +function is_level_concurrent(ctx, lvl::VirtualElementLevel) + return ([], true) +end lower(ctx::AbstractCompiler, lvl::VirtualElementLevel, ::DefaultStyle) = lvl.ex diff --git a/src/tensors/levels/patternlevels.jl b/src/tensors/levels/patternlevels.jl index 2c3316bcb..d9401e0b3 100644 --- a/src/tensors/levels/patternlevels.jl +++ b/src/tensors/levels/patternlevels.jl @@ -85,7 +85,10 @@ function virtual_moveto_level(ctx::AbstractCompiler, lvl::VirtualPatternLevel, a end is_level_injective(ctx, ::VirtualPatternLevel) = [] -is_level_atomic(ctx, lvl::VirtualPatternLevel) = true +is_level_atomic(ctx, lvl::VirtualPatternLevel) = ([false], false) +function is_level_concurrent(ctx, lvl::VirtualPatternLevel) + return ([], true) +end lower(ctx::AbstractCompiler, lvl::VirtualPatternLevel, ::DefaultStyle) = :(PatternLevel()) virtualize(ctx, ex, ::Type{PatternLevel{Tp}}) where {Tp} = VirtualPatternLevel(Tp) diff --git a/src/tensors/levels/repeatrlelevels.jl b/src/tensors/levels/repeatrlelevels.jl index a641c7456..fd69fac41 100644 --- a/src/tensors/levels/repeatrlelevels.jl +++ b/src/tensors/levels/repeatrlelevels.jl @@ -134,7 +134,10 @@ mutable struct VirtualRepeatRLELevel <: AbstractVirtualLevel prev_pos end is_level_injective(ctx, ::VirtualRepeatRLELevel) = [false] -is_level_atomic(ctx, lvl::VirtualRepeatRLELevel) = false +is_level_atomic(ctx, lvl::VirtualRepeatRLELevel) = ([false], false) +function is_level_concurrent(ctx, lvl::VirtualRepeatRLELevel) + return ([false], false) +end function virtualize(ctx, ex, ::Type{RepeatRLELevel{D, Ti, Tp, Tv, Ptr, Idx, Val}}, tag=:lvl) where {D, Ti, Tp, Tv, Ptr, Idx, Val} sym = freshen(ctx, tag) diff --git a/src/tensors/levels/separatelevels.jl b/src/tensors/levels/separatelevels.jl index cba870a1b..c815ebd25 100644 --- a/src/tensors/levels/separatelevels.jl +++ b/src/tensors/levels/separatelevels.jl @@ -81,6 +81,7 @@ countstored_level(lvl::SeparateLevel, pos) = pos mutable struct VirtualSeparateLevel <: AbstractVirtualLevel lvl # stand in for the sublevel for virutal resize, etc. 
ex + val Tv Lvl Val @@ -88,23 +89,32 @@ end postype(lvl:: VirtualSeparateLevel) = postype(lvl.lvl) -is_level_injective(ctx, ::VirtualSeparateLevel) = [is_level_injective(ctx, lvl.lvl)..., true] -is_level_concurrent(ctx, ::VirtualSeparateLevel) = [is_level_concurrent(ctx, lvl.lvl)..., true] -is_level_atomic(ctx, lvl::VirtualSeparateLevel) = is_level_atomic(ctx, lvl.lvl) +is_level_injective(ctx, lvl::VirtualSeparateLevel) = [is_level_injective(ctx, lvl.lvl)..., true] +function is_level_atomic(ctx, lvl::VirtualSeparateLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSeparateLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return (data, true) +end function lower(ctx::AbstractCompiler, lvl::VirtualSeparateLevel, ::DefaultStyle) quote - $SeparateLevel{$(lvl.Lvl), $(lvl.Val)}($(ctx(lvl.lvl)), $(lvl.ex).val) + $SeparateLevel{$(lvl.Lvl), $(lvl.Val)}($(ctx(lvl.lvl)), $(lvl.val)) end end function virtualize(ctx, ex, ::Type{SeparateLevel{Lvl, Val}}, tag=:lvl) where {Lvl, Val} sym = freshen(ctx, tag) + pointers = freshen(ctx, tag, :_pointers) + push!(ctx.preamble, quote - $sym = $ex + $sym = $ex + $pointers = $ex.val end) lvl_2 = virtualize(ctx, :($ex.lvl), Lvl, sym) - VirtualSeparateLevel(lvl_2, sym, typeof(level_default(Lvl)), Lvl, Val) + VirtualSeparateLevel(lvl_2, sym, pointers, typeof(level_default(Lvl)), Lvl, Val) end Base.summary(lvl::VirtualSeparateLevel) = "Separate($(lvl.Lvl))" @@ -114,8 +124,23 @@ virtual_level_size(ctx, lvl::VirtualSeparateLevel) = virtual_level_size(ctx, lvl virtual_level_eltype(lvl::VirtualSeparateLevel) = virtual_level_eltype(lvl.lvl) virtual_level_default(lvl::VirtualSeparateLevel) = virtual_level_default(lvl.lvl) +function virtual_moveto_level(ctx, lvl::VirtualSeparateLevel, arch) + + # Need to move each pointer... 
+ pointers = freshen(ctx.code, lvl.val) + push!(ctx.code.preamble, quote + $pointers = $(lvl.val) + $(lvl.val) = $moveto($(lvl.val), $(ctx(arch))) + end) + push!(ctx.code.epilogue, quote + $(lvl.val) = $pointers + end) + virtual_moveto_level(ctx, lvl.lvl, arch) +end + + function declare_level!(ctx, lvl::VirtualSeparateLevel, pos, init) - #declare_level!(ctx_2, lvl.lvl, literal(1), init) + #declare_level!(lvl.lvl, ctx_2, literal(1), init) return lvl end @@ -125,7 +150,7 @@ function assemble_level!(ctx, lvl::VirtualSeparateLevel, pos_start, pos_stop) pos = freshen(ctx.code, :pos) sym = freshen(ctx.code, :pointer_to_lvl) push!(ctx.code.preamble, quote - Finch.resize_if_smaller!($(lvl.ex).val, $(ctx(pos_stop))) + Finch.resize_if_smaller!($(lvl.val), $(ctx(pos_stop))) for $pos in $(ctx(pos_start)):$(ctx(pos_stop)) $sym = similar_level( $(lvl.ex).lvl, @@ -140,7 +165,7 @@ function assemble_level!(ctx, lvl::VirtualSeparateLevel, pos_start, pos_stop) push!(ctx_2.code.preamble, assemble_level!(ctx_2, lvl_2, literal(1), literal(1))) contain(ctx_2) do ctx_3 lvl_2 = freeze_level!(ctx_3, lvl_2, literal(1)) - :($(lvl.ex).val[$(ctx_3(pos))] = $(ctx_3(lvl_2))) + :($(lvl.val)[$(ctx_3(pos))] = $(ctx_3(lvl_2))) end end) end @@ -156,12 +181,12 @@ function reassemble_level!(ctx, lvl::VirtualSeparateLevel, pos_start, pos_stop) push!(ctx.code.preamble, quote for $idx in $(ctx(pos_start)):$(ctx(pos_stop)) $(contain(ctx) do ctx_2 - lvl_2 = virtualize(ctx_2.code, :($(lvl.ex).val[$idx]), lvl.Lvl, sym) + lvl_2 = virtualize(ctx_2.code, :($(lvl.val)[$idx]), lvl.Lvl, sym) push!(ctx_2.code.preamble, assemble_level!(ctx_2, lvl_2, literal(1), literal(1))) lvl_2 = declare_level!(ctx_2, lvl_2, literal(1), init) contain(ctx_2) do ctx_3 lvl_2 = freeze_level!(ctx_3, lvl_2, literal(1)) - :($(lvl.ex).val[$(ctx_3(pos))] = $(ctx_3(lvl_2))) + :($(lvl.val)[$(ctx_3(pos))] = $(ctx_3(lvl_2))) end end) end @@ -186,7 +211,7 @@ function instantiate(ctx, fbr::VirtualSubFiber{VirtualSeparateLevel}, mode::Read val = freshen(ctx.code, lvl.ex, :_val) return body = Thunk( body = (ctx) -> begin - lvl_2 = virtualize(ctx.code, :($(lvl.ex).val[$(ctx(pos))]), lvl.Lvl, sym) + lvl_2 = virtualize(ctx.code, :($(lvl.val)[$(ctx(pos))]), lvl.Lvl, sym) instantiate(ctx, VirtualSubFiber(lvl_2, literal(1)), mode, protos) end, ) @@ -199,14 +224,14 @@ function instantiate(ctx, fbr::VirtualSubFiber{VirtualSeparateLevel}, mode::Upda return body = Thunk( body = (ctx) -> begin - lvl_2 = virtualize(ctx.code, :($(lvl.ex).val[$(ctx(pos))]), lvl.Lvl, sym) + lvl_2 = virtualize(ctx.code, :($(lvl.val)[$(ctx(pos))]), lvl.Lvl, sym) lvl_2 = thaw_level!(ctx, lvl_2, literal(1)) push!(ctx.code.preamble, assemble_level!(ctx, lvl_2, literal(1), literal(1))) res = instantiate(ctx, VirtualSubFiber(lvl_2, literal(1)), mode, protos) push!(ctx.code.epilogue, contain(ctx) do ctx_2 lvl_2 = freeze_level!(ctx_2, lvl_2, literal(1)) - :($(lvl.ex).val[$(ctx_2(pos))] = $(ctx_2(lvl_2))) + :($(lvl.val)[$(ctx_2(pos))] = $(ctx_2(lvl_2))) end ) res @@ -220,14 +245,14 @@ function instantiate(ctx, fbr::VirtualHollowSubFiber{VirtualSeparateLevel}, mode return body = Thunk( body = (ctx) -> begin - lvl_2 = virtualize(ctx.code, :($(lvl.ex).val[$(ctx(pos))]), lvl.Lvl, sym) + lvl_2 = virtualize(ctx.code, :($(lvl.val)[$(ctx(pos))]), lvl.Lvl, sym) lvl_2 = thaw_level!(ctx, lvl_2, literal(1)) push!(ctx.code.preamble, assemble_level!(ctx, lvl_2, literal(1), literal(1))) res = instantiate(ctx, VirtualHollowSubFiber(lvl_2, literal(1), fbr.dirty), mode, protos) push!(ctx.code.epilogue, contain(ctx) do ctx_2 lvl_2 = 
freeze_level!(ctx_2, lvl_2, literal(1)) - :($(lvl.ex).val[$(ctx_2(pos))] = $(ctx_2(lvl_2))) + :($(lvl.val)[$(ctx_2(pos))] = $(ctx_2(lvl_2))) end ) res diff --git a/src/tensors/levels/sparsebandlevels.jl b/src/tensors/levels/sparsebandlevels.jl index d40c4db53..cb44c7e60 100644 --- a/src/tensors/levels/sparsebandlevels.jl +++ b/src/tensors/levels/sparsebandlevels.jl @@ -137,7 +137,14 @@ mutable struct VirtualSparseBandLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseBandLevel) = [is_level_injective(ctx, lvl.lvl)..., false] -is_level_atomic(ctx, lvl::VirtualSparseBandLevel) = false +function is_level_atomic(ctx, lvl::VirtualSparseBandLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseBandLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end postype(lvl::VirtualSparseBandLevel) = postype(lvl.lvl) diff --git a/src/tensors/levels/sparsebytemaplevels.jl b/src/tensors/levels/sparsebytemaplevels.jl index b9eebf8a7..c0b1fbf07 100644 --- a/src/tensors/levels/sparsebytemaplevels.jl +++ b/src/tensors/levels/sparsebytemaplevels.jl @@ -140,7 +140,14 @@ mutable struct VirtualSparseByteMapLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseByteMapLevel) = [is_level_injective(ctx, lvl.lvl)..., false] -is_level_atomic(ctx, lvl::VirtualSparseByteMapLevel) = false +function is_level_atomic(ctx, lvl::VirtualSparseByteMapLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseByteMapLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end function virtualize(ctx, ex, ::Type{SparseByteMapLevel{Ti, Ptr, Tbl, Srt, Lvl}}, tag=:lvl) where {Ti, Ptr, Tbl, Srt, Lvl} sym = freshen(ctx, tag) @@ -180,9 +187,9 @@ function virtual_moveto_level(ctx::AbstractCompiler, lvl::VirtualSparseByteMapLe $ptr_2 = $(lvl.ptr) $tbl_2 = $(lvl.tbl) $srt_2 = $(lvl.srt) - $(lvl.ptr) = $moveto($(lvl.ptr), $(ctx(arch))) - $(lvl.tbl) = $moveto($(lvl.tbl), $(ctx(arch))) - $(lvl.srt) = $moveto($(lvl.srt), $(ctx(arch))) + $(lvl.ptr) = moveto($(lvl.ptr), $(ctx(arch))) + $(lvl.tbl) = moveto($(lvl.tbl), $(ctx(arch))) + $(lvl.srt) = moveto($(lvl.srt), $(ctx(arch))) end) push!(ctx.code.epilogue, quote $(lvl.ptr) = $ptr_2 diff --git a/src/tensors/levels/sparsecoolevels.jl b/src/tensors/levels/sparsecoolevels.jl index 0afdfa9c0..c676a80f8 100644 --- a/src/tensors/levels/sparsecoolevels.jl +++ b/src/tensors/levels/sparsecoolevels.jl @@ -154,7 +154,14 @@ mutable struct VirtualSparseCOOLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseCOOLevel) = [is_level_injective(ctx, lvl.lvl)..., (true for _ in 1:lvl.N)...] 
-is_level_atomic(ctx, lvl::VirtualSparseCOOLevel) = false +function is_level_atomic(ctx, lvl::VirtualSparseCOOLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic for _ in 1:lvl.N]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseCOOLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false for _ in 1:lvl.N]], false) +end function virtualize(ctx, ex, ::Type{SparseCOOLevel{N, TI, Ptr, Tbl, Lvl}}, tag=:lvl) where {N, TI, Ptr, Tbl, Lvl} sym = freshen(ctx, tag) diff --git a/src/tensors/levels/sparsehashlevels.jl b/src/tensors/levels/sparsehashlevels.jl index 894c8cd12..0647307b8 100644 --- a/src/tensors/levels/sparsehashlevels.jl +++ b/src/tensors/levels/sparsehashlevels.jl @@ -168,6 +168,14 @@ mutable struct VirtualSparseHashLevel <: AbstractVirtualLevel qos_stop Lvl end + +is_level_injective(ctx, lvl::VirtualSparseHashLevel) = [is_level_injective(ctx, lvl.lvl)..., (true for _ in 1:lvl.N)...] +function is_level_atomic(ctx, lvl::VirtualSparseHashLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic for _ in 1:lvl.N]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseHashLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false for _ in 1:lvl.N]], false) +end -is_level_injective(ctx, lvl::VirtualSparseHashLevel) = [is_level_injective(ctx, lvl.lvl)..., (true for _ in 1:lvl.N)...] -is_level_atomic(ctx, lvl::VirtualSparseHashLevel) = false diff --git a/src/tensors/levels/sparseintervallevels.jl b/src/tensors/levels/sparseintervallevels.jl index e7a29727a..fe4c1e4c9 100644 --- a/src/tensors/levels/sparseintervallevels.jl +++ b/src/tensors/levels/sparseintervallevels.jl @@ -144,9 +144,14 @@ mutable struct VirtualSparseIntervalLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseIntervalLevel) = [false, is_level_injective(ctx, lvl.lvl)...] -is_level_concurrent(ctx, lvl::VirtualSparseIntervalLevel) = [false, is_level_concurrent(ctx, lvl.lvl)...]
-is_level_atomic(ctx, lvl::VirtualSparseIntervalLevel) = false - +function is_level_atomic(ctx, lvl::VirtualSparseIntervalLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseIntervalLevel) + (data, concurrent) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end function virtualize(ctx, ex, ::Type{SparseIntervalLevel{Ti, Ptr, Left, Right, Lvl}}, tag=:lvl) where {Ti, Ptr, Left, Right, Lvl} sym = freshen(ctx, tag) diff --git a/src/tensors/levels/sparselevels.jl b/src/tensors/levels/sparselevels.jl index 72f7faacc..505dd7bf9 100644 --- a/src/tensors/levels/sparselevels.jl +++ b/src/tensors/levels/sparselevels.jl @@ -254,7 +254,15 @@ mutable struct VirtualSparseLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseLevel) = [is_level_injective(ctx, lvl.lvl)..., false] -is_level_atomic(ctx, lvl::VirtualSparseLevel) = false +function is_level_atomic(ctx, lvl::VirtualSparseLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + #FIXME: + return ([data; [false]], false) +end function virtualize(ctx, ex, ::Type{SparseLevel{Ti, Tbl, Lvl}}, tag=:lvl) where {Ti, Tbl, Lvl} sym = freshen(ctx, tag) diff --git a/src/tensors/levels/sparselistlevels.jl b/src/tensors/levels/sparselistlevels.jl index 77a929411..ae2e9e260 100644 --- a/src/tensors/levels/sparselistlevels.jl +++ b/src/tensors/levels/sparselistlevels.jl @@ -138,7 +138,14 @@ mutable struct VirtualSparseListLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseListLevel) = [is_level_injective(ctx, lvl.lvl)..., false] -is_level_atomic(ctx, lvl::VirtualSparseListLevel) = false +function is_level_atomic(ctx, lvl::VirtualSparseListLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseListLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end function virtualize(ctx, ex, ::Type{SparseListLevel{Ti, Ptr, Idx, Lvl}}, tag=:lvl) where {Ti, Ptr, Idx, Lvl} sym = freshen(ctx, tag) diff --git a/src/tensors/levels/sparsepointlevels.jl b/src/tensors/levels/sparsepointlevels.jl index f4aeb872f..66da669fb 100644 --- a/src/tensors/levels/sparsepointlevels.jl +++ b/src/tensors/levels/sparsepointlevels.jl @@ -142,7 +142,15 @@ mutable struct VirtualSparsePointLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparsePointLevel) = [is_level_injective(ctx, lvl.lvl)..., false] -is_level_atomic(ctx, lvl::VirtualSparsePointLevel) = false + +function is_level_atomic(ctx, lvl::VirtualSparsePointLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparsePointLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end function virtualize(ctx, ex, ::Type{SparsePointLevel{Ti, Ptr, Idx, Lvl}}, tag=:lvl) where {Ti, Ptr, Idx, Lvl} sym = freshen(ctx, tag) diff --git a/src/tensors/levels/sparserlelevels.jl b/src/tensors/levels/sparserlelevels.jl index cbf915254..9b385d2ad 100644 --- a/src/tensors/levels/sparserlelevels.jl +++ b/src/tensors/levels/sparserlelevels.jl @@ -146,8 +146,14 @@ mutable struct VirtualSparseRLELevel <: AbstractVirtualLevel end is_level_injective(ctx, 
lvl::VirtualSparseRLELevel) = [false, is_level_injective(ctx, lvl.lvl)...] -is_level_concurrent(ctx, lvl::VirtualSparseRLELevel) = [false, is_level_concurrent(ctx, lvl.lvl)...] -is_level_atomic(ctx, lvl::VirtualSparseRLELevel) = false +function is_level_atomic(ctx, lvl::VirtualSparseRLELevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseRLELevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end postype(lvl::VirtualSparseRLELevel) = postype(lvl.lvl) diff --git a/src/tensors/levels/sparsetrianglelevels.jl b/src/tensors/levels/sparsetrianglelevels.jl index 61df0589c..00a1b0ccf 100644 --- a/src/tensors/levels/sparsetrianglelevels.jl +++ b/src/tensors/levels/sparsetrianglelevels.jl @@ -111,7 +111,14 @@ mutable struct VirtualSparseTriangleLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseTriangleLevel) = [is_level_injective(ctx, lvl.lvl)..., (true for _ in 1:lvl.N)...] -is_level_atomic(ctx, lvl::VirtualSparseTriangleLevel) = is_level_atomic(ctx, lvl.lvl) +function is_level_atomic(ctx, lvl::VirtualSparseTriangleLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic for _ in 1:lvl.N]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseTriangleLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false for _ in 1:lvl.N]], false) +end postype(lvl::VirtualSparseTriangleLevel) = postype(lvl.lvl) diff --git a/src/tensors/levels/sparsevbllevels.jl b/src/tensors/levels/sparsevbllevels.jl index 59928805f..35bb0941a 100644 --- a/src/tensors/levels/sparsevbllevels.jl +++ b/src/tensors/levels/sparsevbllevels.jl @@ -146,8 +146,14 @@ mutable struct VirtualSparseVBLLevel <: AbstractVirtualLevel end is_level_injective(ctx, lvl::VirtualSparseVBLLevel) = [is_level_injective(ctx, lvl.lvl)..., false] -is_level_atomic(ctx, lvl::VirtualSparseVBLLevel) = false - +function is_level_atomic(ctx, lvl::VirtualSparseVBLLevel) + (below, atomic) = is_level_atomic(ctx, lvl.lvl) + return ([below; [atomic]], atomic) +end +function is_level_concurrent(ctx, lvl::VirtualSparseVBLLevel) + (data, _) = is_level_concurrent(ctx, lvl.lvl) + return ([data; [false]], false) +end postype(lvl::VirtualSparseVBLLevel) = postype(lvl.lvl) diff --git a/src/transforms/concurrent.jl b/src/transforms/concurrent.jl index 10ea492cf..bcef91c8b 100644 --- a/src/transforms/concurrent.jl +++ b/src/transforms/concurrent.jl @@ -15,11 +15,21 @@ function is_injective end """ is_atomic(ctx, tns) -Returns a boolean indicating whether it is safe to update the same element of the -tensor from multiple simultaneous threads. +Returns a tuple (below, overall), where below is a vector indicating which indices are guarded by an atomic, +and overall is a boolean indicating whether the last level is guarded by an atomic. """ function is_atomic end +""" + is_concurrent(ctx, tns) + +Returns a vector of booleans, one for each dimension of the tensor, indicating +whether that index can be written to concurrently without shared execution state. So if a matrix returns [true, false], +then we can write to A[i, j] and A[i_2, j] without any shared execution state between the two writes, but +we cannot write to A[i, j] and A[i, j_2] without carrying execution state over. +""" +function is_concurrent end """ ensure_concurrent(root, ctx) @@ -43,6 +53,19 @@ function ensure_concurrent(root, ctx) end end + # Get all indices in the parallel region.
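+ # (e.g., in the kernel from the new reference test below, "for i=_; for j=_; for k=_; CR[i, j] += A[i, k] * B[k, j]" parallelized over i, this collects i, j, and k, since a nonlocal write may vary along any of them.)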
+ indices_in_region = [idx] + for node in PostOrderDFS(body) + if @capture node loop(~idxp, ~ext, ~body) + if !(idxp in indices_in_region) + push!(indices_in_region, idxp) + end + end + end + + + for (root, agns) in nonlocal_assigns ops = map(agn -> (@capture agn assign(~lhs, ~op, ~rhs); op), agns) if !allequal(ops) @@ -50,25 +72,78 @@ function ensure_concurrent(root, ctx) end accs = map(agn -> (@capture agn assign(~lhs, ~op, ~rhs); lhs), agns) - if !allequal(accs) - throw(FinchConcurrencyError("Nonlocal assignments to $(root) are not all the same access")) - end acc = first(accs) + # The operation must be associative. + oper = first(ops) + if !(isassociative(ctx.algebra, oper)) + if (length(ops) == 1) + if (@capture(acc, access(~tns, ~mode, ~i...))) + injectivity::Vector{Bool} = is_injective(ctx, tns) + concurrencyInfo = is_concurrent(ctx, tns) + if !all(injectivity) || !all(concurrencyInfo) + throw(FinchConcurrencyError("Non-associative operations can only be parallelized in the case of a single injective access, but the injectivity is $(injectivity) and the concurrency is $(concurrencyInfo).")) + else - if !( - (@capture(acc, access(~tns, ~mode, ~i..., idx)) && is_injective(ctx, tns)[length(i) + 1]) || - isassociative(ctx.algebra, first(ops)) - ) - throw(FinchConcurrencyError("Nonlocal assignments to $(root) are not associative")) + continue # We pass via a single assignment that is completely injective. + end + else + throw(FinchConcurrencyError("Assignment $(acc) is invalid!")) + end + end + throw(FinchConcurrencyError("Nonlocal assignments to $(root) via $(oper) are not associative")) end + # If the accesses are different, then all accesses must be atomic. + if !allequal(accs) + for acc in accs + (below, _) = is_atomic(ctx, acc.tns) + concurrencyInfo = is_concurrent(ctx, acc.tns) + if !all(below) || !all(concurrencyInfo) + throw(FinchConcurrencyError("Nonlocal assignments to $(root) are not all the same access, so atomics and concurrent writability are needed on all accesses!")) + end + end + continue + else + # Since all operations/accesses are the same, a more fine-grained analysis takes place: + # every access must be injective, or they must all be atomic. + if (@capture(acc, access(~tns, ~mode, ~i...))) + locations_with_parallel_vars = Int[] + injectivity::Vector{Bool} = is_injective(ctx, tns) + concurrencyInfo = is_concurrent(ctx, acc.tns) + for loc in 1:length(i) + if i[loc] in indices_in_region + push!(locations_with_parallel_vars, loc + 1) + # FIXME: this off-by-one offset should go away + end + end + if isempty(locations_with_parallel_vars) + (below, overall) = is_atomic(ctx, acc.tns) + if !below[1] + throw(FinchConcurrencyError("Assignment $(acc) requires last level atomics!")) + # FIXME: we could do atomic operations here. + else + continue + end + end + + #TODO If we could prove that some indices do not depend on the parallel index, we could exempt them from this somehow. + if all(injectivity[[x - 1 for x in locations_with_parallel_vars]]) && all(concurrencyInfo[[x - 1 for x in locations_with_parallel_vars]]) + continue # We pass due to injectivity! + end + # FIXME: This could be more fine-grained: atomics only need to protect the non-injective locations. + (below, _) = is_atomic(ctx, acc.tns) + if all(below[locations_with_parallel_vars]) && all(concurrencyInfo[[x - 1 for x in locations_with_parallel_vars]]) + continue # we pass due to atomics!
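+ # Otherwise, some parallel-varying position is neither injective nor guarded by an atomic, so we must reject the kernel.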
+ else + throw(FinchConcurrencyError("Assignment $(acc) requires injectivity or atomics in at least places $(locations_with_parallel_vars), but does not have them, due to injectivity=$(injectivity) and atomics=$(below) and concurrency=$(concurrencyInfo).")) + end - if !( - (is_atomic(ctx, acc.tns)) || - (@capture(acc, access(~tns, ~mode, ~i..., idx)) && is_injective(ctx, tns)[length(i) + 1]) - ) - throw(FinchConcurrencyError("Cannot prove that $(acc) is safe to update from multiple threads")) + #TODO perhaps if the last access is the parallel index, we only need injectivity or atomics on the parallel one, and concurrency on that one only + else + throw(FinchConcurrencyError("Assignment $(acc) is invalid! ")) + end end end - + # we validated everything so we are done! return root end \ No newline at end of file diff --git a/test/reference64/debug_parallel_spmms_atomics.txt b/test/reference64/debug_parallel_spmms_atomics.txt new file mode 100644 index 000000000..20b20d91c --- /dev/null +++ b/test/reference64/debug_parallel_spmms_atomics.txt @@ -0,0 +1,12 @@ +julia> @finch begin + CR .= 0 + for i = _ + for j = _ + for k = _ + CR[i, j] += A[i, k] * B[k, j] + end + end + end + end +(CR = Tensor(Dense{Int64}(Dense{Int64}(Element{0, Int64, Int64}([-2897731851209917289, -629695198063022239, -1839315671276920483, 5286864533103673104, 3825294693186414387, 8187178306671075173, -6696676089258937870, -2520109756947128486, -20289434330540491, -5787994923599559523, -4690621447069628201, -3685474431862009818, 8058502605257017607, 5640589951569473215, 3849918686675863666, 7149264367810900310, -7918639420941648831, -219473507856870521, -4180149432006842267, -8067732119114098801, 5942621818508124344, 7575473134895965323, 5862393635570366967, 6727659729742216070, -9170686636535443653, -4380751952733187424, 2694275982218304171, -5622207889637091685, 6920480630532056176, -4695528432768100779, -7140250039144966679, 1647660469229977143, -5813535200943658065, -5108487689518882964, 105349284871233846, 3533551331957915009, -6421774673443915850, -8247488193287417608, 3465220782931455560, 2188739699892996976, 1738207526057006556, 7632400394958336912, -7185865295012645714, -5971975973441056185, 8619430294301720090, 6750704504721401573, 9011111656977179320, 3976717612392110624, -2855464758831674755, 5772312830190457687, 5705856240648847077, -6554416729703711956, -7671600437698476751, -2326593449830317024, 8199280755065301439, -5556594091992299220, 5996150198530173182, -66959108866504107, 8345000249932803365, 3831734041534197627, 6755491505173220572, -4659537153252579301, -2071191076655572025, 3524098254281235897, -6019699174620949342, 5299350675715047865, 5173546748898002610, 5857646227517503622, -8678855740040481941, 9205325132597008121, 2679464604904844147, 3500566488466366606, 2528618820389241136, -7731572199940279950, 6706492552101050489, -7671608615353517112, 8615322856175757155, 2041531185812457443, -4597918392009606570, 5959509106620377635, -6546274357862253284, -6562260887896769636, -3883002056644532447, 6035506081559639599, 2418315302191397462, 6881421639735021162, -2684431465983020283, 7901201147652586922, -7329472400299953800, -6044679867347527656, -645206169439960395, 9140148023567200526, -6127509662415728451, -2907877225542925760, 1233118105724836953, -6684966046725230146, 5958154597206584650, -8295737209797309852, -2979604794675458526, -1093882253634755281, 589184012956362892, -8424114923273878580, -5038237080122443733, -2993895945422711664, 8354882507439945414, -7839672476150261887, 1456259160890157121, 
-4086283412529733209, 5176260809151222856, 3240720920634065444, -8499485879017620677, -6050289119868059200, 5958349598028165604, -396224937468008404, -7165694695321191293, 2764858757025017434, -8420698995062416861, 8352477605870078083, 8303024327746227907, -555141626407569013, 7999293508001523726, -226937596482935795, -414770807101573456, 7733340213823592395, 1863301452732088327, 6886971882339478815, 6683734224802497383, 6616844232176954593, -7058003407463953990, 8352721930174564169, -4187114157022694158, 5748477590433860865, 8152913386940253201, 1020920222728157466, -6156712415657483053, 3333862940818551097, -5280640806937481328, -2149379470118826130, -6326372139327998932, -1741992002060548425, -2818822515203407205, -1376513383775957781, 7590418639273688222, 1878597322051548837, 3301930258766050494, 2698309426289683029, 6641576727395292413, 5641351493875895216, 2385361330962138121, -4697951389071377230, 3331879975516397594, 214446761555706988, -6302967254309343788, 4201214062774385848, 3001629630360800562, 5398598970413351811, 3977930090534176165, 3984924169482969364, 8718787914396782846, -6550382047311301941, -3261104062877111456, 2224794809086878195, 3438152747392650753, 78544168894719298, 5565037068078116938, -1672738603012014416, -7783861620402571343, 8492529133488827344, -8786384557214556272, -8457059655731752450, 3152148410818985269, -6831111477572095935, -5090582504848843787, 939132468745525960, -8517204160800388434, -2023583321278737464, -5170733752123814842, 7408140656467050750, 1577804908013158842, 5462193298750207551, 7161907659181072885, 1032806589746917760, -6710907771691354309, 583164181606912624, 826225025409670746, 1845184163111179877, -1523357483101184554, -1759596407461252666, -2023016500968681400, 8227146370220730262, -7604160361983971000, -1852466148179737427, -2137085600013354519, -8199126469188847765, 1220608622514992683, 3311994200347642239, -1290172956079612344, -6544644906908464211, 8790242329423795234, 7660876971320012823, -2303183605756682504, 2156964647309555257, 7740821051083555444, 496251688421899142, -2918158300642530090, -2368069312871487761, -5576966491634276936, 3368155889651124785, 2573867957286056550, 3389306480586276600, -6632582265816400415, 5044209673043616327, -4871176523865504714, 140138835390150585, -5886804929599432279, -1951197713667646473, 4789805153127521042, -2948816997562727089, 4686228127580110990, -8169064207993608829, -1713557455428160777, -984546056710531313, -6679796821988582829, 3241890112016235466, -1945417294428060463, 8843221800878523170, 2126242068639801859, 4834426592267409760, -7810773245566287394, 6321972250164355439, 3339063352739739362, 8708867835750565327, 6321587057145441117, 2053990000204048661, -4392484502580500113, -5967755670248374738, 3101480422349223349, 307777590593740658, 4073794276693964705, -228462136369587326, -8660809879270890381, -2132062400164338921, 4581848486722525106, -2566616859679265065, 1051393024393023791, 1025654123963368336, -5260929567928217513, -5435477509793003691, -6711840603538083559, -2197694495056747177, -287542043070388673, -4050250125529316224, -8946110539639396141, -2054023689822011870, -2235380394722248370, -4203094670443255817, -2387791590514481867, -4609523129340562901, 125534248568429316, 1346347296436801414, 5258287141063211013, -7241667893898745823, -7751509314980596778, -1869668919565833382, 4713835972811559470, 3254165350251922750, -4311839833988879206, -6337870317476815657, -6993345376598483895, 2941369493407468449, -1068171312197320483, -4339504352704496157, 5786024576578575289, 
6300908636269959183, -4083524641983623681, 5987904725078853733, -7558116981616351650, 3248872891666573939, 8233544538525553495, 7452909854778854051, -9158321572523776465, 5132056847905343696, -1786674393390641542, 4322104503801715300, 2189701755760550962, -4365285277563237855, 8689970982250967826, -6301843793661836356, 1142969384972370970, -7685111598556905445, -8943371487314555071, 5927109178725189833, 6584733654197843790, 4129966099949983753, 3892361800311887490, 976504649055835441, 7250515625719943593, -2752580080442165331, -6396030823300205728, 2362672138095369962, -1260560443090480947, -1658998457453151306, -1718124828017906091, 9196167650710978616, 7608949355576985057, -817607203077398315, 1294358010623399614, 4307383593767711987, -768269503954087451, -6478246183817059591, -2358026556950361832, -7753105266059335775, -4000302419547338819, 3776165577496166557, 2584620804716842961, -1672311911634073875, 8278532580838608422, -2941729935949741794, 1933036231503941573, -3388180313413508413, 2046112873236671793, -7125588341208093183, 8727635799478651345, -8595636369460236813, -963949918148789794, -493405345440012110, 5049046844149422445, 7098408324430252846, 9223306068033466294, -6681811557413626672, -3284812297005480285, 2002270255788100639, -8858657205736903238, 3048384305946135030, 3612958165672678149, 986090196012349096, 2628110221527052317, 3405404297586403010, -7451628816118853549, -3338292389440533702, -7197099659529989383, -4900385429448633900, 3383358724275480052, 8034253264761273579, 6904177519136177176, -1998573972803109643, 1248104199460752608, -1235236915177455638, 4887216939031992310, 6223491257469423936, 1630288423252185577, -6112424948209314484, 8553758441866471319, -2425379776109253554, 2263729538657083381, -2043093745560065779, -929289066028444040, 7265187365292593329, -1580535609375864522, 2539593360595251369, 2622646908818713834, -2468050465805489066, -1896860363069572305, 8958059076969656951, 8854588258667373569, 3088434772754154022, -5688421953918974192, 2626230419890146200, -6763781199721815244, -6996731428735666557, 3708973810309285267, 4245138822197348460, -5001530863314072862, -6179973482153993649, -6597924114838480353, 1853884265239292742, -5915670873011808762, -1928251130853311162, 516467345239231767, 2764895242811094555, 7072973752563207485, 3704226432507665575, 1198940836595171688, 6047026749996160592, 1316512900863186973, 2872101096334766460, -7121400703248441688, -4223025996802232664, 7732298566549652512, -2939386878089948554, 3762127407801792388, -2857347841301145064, 170215569414046779, 6873323080314457686, 7172062585916986051, -1224317788452948956, 8665604532024649062, -3587696444806937141, -5612907008074524221, 1377040670764098707, -2118677304977686351, -7831278040734946053, -4938562009824852440, -5988287747485056812, -3219634092104508411, 5168862747273876500, -3600630809445155947, 7501020906256557042, 5050846388999994157, -7665674278860681899, -6313764560664955356, 2624945294329635356, -4583885093910321716, -9160463722466935585, -1922711743451490245, -7915085717343654739, 8197259727957483192, 1296622288070057647, 7811846479066499713, -2851032081953334936, 2556543701252443581, -3728691889258338754, 4600460226296431818, 1885055964305882751, 988542514653007448, -8796509572570350449, -5648563851556043855, 2124369605412566901, -3270522609663679424, -3501580887447459774, 7424884421743001057, -6903418298893868904, -5475163122892935503, -513909162403736845, 6353295544275849102, 606808889878357893, 4177093049627254554, -5036916523433329957, -6820132758506581004, 
-1037912375543763796, 7264174584540669442, 9088639648846841197, 8333658040852450381, -8445964194454904679, 3741829768403156095, 3915698333268154679, 7815344654185484287, 2741724569969250368, -7665103435529312384, 9100873308619106742, 3421464801940081678, -8701712540799509793, 1532410275442943151, 4130175416040730585, -1784554548711470489, 4076316007665273885, 2961704630956204982, -881583544869961373, 7663018629536003030, -5763518983518888334, 3114677371290572451, -1659099744568908543, -34203145603105886, -1220936557841603647, 5950943159651784646, -1860957886454194850, -6668509316234975116, -7964075269288373356, 7153329273830623611, -2279877691935167815, -8648699827118861771, -5569193215730144105, -6707905235378160735, 1975928828057488634, -7676333818029234506, -962360174048092334, 9102942620383716718, -8211815767999769075, -1219458741166705187, 1828786899714865915, -2731992684245152871, 5500714780506194558, -9448733915720396, -6587605919331184027, 8253276779505000767, 8977491268382594789, -3913954514225857093, 1600098662429808740, 4403013106300218329, -3615290149261296840, -7602159198292327640, -3428994073771037480, 8498215013320052235, 862416151405647038, 918739553945024580, 3633310031105340657, 4338559589321920089, 5763049141678311194, 4217012607785333969, 6176763510218655288, 4403108969366937744, 7203053735310781274, -6552719863129851122, -1200404146413085231, -481109999518009176, 7678689870167937387, 7543449643038450780, 854589718451472137, 2799546936599080881, -7997197238886600923, 8078858255651498074, 2656762659688063273, -1356568473271068944, -3379016696355014119, -6355326385667250316, -4361042370369490938, 7192221713130010079, -7758923720965810036, -6912453143910706331, 6885837876356001683, 5152846234669337263, 8447453881712439205, 402153291132863715, -6750146672340937861, -3393118343442891115, -6517024052493510341, -2943132662379212424, -8044464495001505943, 1785178527484246390, 681621430642192097, -3089775668746168925, -5356368166955859937, -648039146633558093, -4459060956602036476, -5059035968815435502, -3178319646733143608, -8497741606376559181, 7857276938454619545, -5060780955701053429, -3626191864770598065, -776440272103413584, 5741399641679533144, 1213054496399189563, -8992057068260536980, -1151177747661149209, -5709797590207038733, -5009117227720086744, 7235187757570179459, 6825189740690177118, -6414574604123166095, -8035861727747821273, -4788527601701811687, -425455094851125341, -3612261793149316407, 6216603891812766891, -4420186300715134411, 4208535996568976162, 1965801627336468939, -2666925795390037887, -1844733202521097892, 316834901837030005, 8322687882266752670, 6886841443144003325, 3100605368138358995, -4680526287142840799, -9058962223867524432, -1416310979853153038, 5252925471057379281, -835136455519949933, -4403659001846954410, 2600856067299103537, 4695776455192323732, -5521872523903551442, -436873383149802155, -4957000796888658921, 1120671453992271103, 1136827044585630017, -5769263847802282965, -5284326664401851426, 2001839843756918803, 6381970752796295779, 484547452792561762, -3426075066984557015, -5069703414344440848, -77159458541818432, -7932175542470358043, 728895892903089573, -8794307157940388294, 2010890356655210930, -9187540242108764494, -8389913656646221587, -4284614820306162621, 5714765770816303120, -1853597308443823882, -3664841326079761295, 4352396322316878508, -1893646183485487084, -3409029234055034076, -2428342727380151439, 7734795593051411858, -7060533431301017982, -1255169038879989661, 8571805461570268125, 3586527561240892983, 4524040196011651407, 
7641249333251859407, -4948566001967601536, 6766760774843355516, -4006863069728015948, 310927621637971295, -3261807252586983044, 3571656263837952184, -8479883834209296092, 663441069719413006, -1094282207677440406, 6334667221915377235, -660903793371602657, -4870815058202514770, 1965508377592409194, -4708274026943956371, 8205544502233890354, -1477159235923536049, 4418957172671230322, -332023011661557674, -368016183737927639, 5637551099547826281, 2321455877798725322, -5708176602342049472, 8905607907016257661, 9006409007651340697, 612378497694320803, 1970428788526075539, 1723979102387250390, 3600358285292563552, 4393998645113586199, -2880825132479987473, -4301501787140301037, -6981957119181617364, -1152266657702512725, -1781350539387764563, 7794633497567474735, 8671579680572972864, -3245021758294115310, -2863718454930227690, 2564348809768682849, -2546106989111075634, -6349196663550598579, -75239193704415975, 5707498029532069961, 4866190449184534616, -470130125302034348, 396671001566421410, 4774256382472498807, 2578508964645255971, -6031682378035860212, 5767334338007366811, -8801856065478078708, 4918827747069032430, 1957768073882872810, -6787792199728380730, 7949377215807745145, -1067191332880834968, 2593668075400291801, -2599846743680689893, 9063299299571631135, -4778743211377369544, 8801417698864415908, 8353938051846592967, 4032063175704865429, 4279035617754754757, -4095569132398073066, -9028184362410282248, 2355277400915900761, -5829404808424873851, 427549106165806860, -5782066638677599342, 5029136798531686691, 269456919575806772, -5042387257487062646, 5460687080529034698, 1802015400165591924, -1293650749535894879, 7092309186717495961, -8899635748967826324, -8422742726402585920, 24027959463934555, -7053221393575912636, -195614362265234999, -372366678888242902, 6030731625134300918, -8797714065093588185, -5422878433323629922, 7046693448547834477, -8304404767521707126, -4688998650701563771, 759352611754272788, -947156452440985318, 8600824679243831277, 6860392432286569846, 5430503347373610183, -910175827200890427, -6984132761370499298, 5483738725812642402, 7839579949165080376, -6288830629129099672, -5498250561216205628, 6189457048927984799, 9008999048742994027, -7766055774924120343, 2615914051413535156, -5937542378454037173, -4798511705446060234, 7592045436211652624, 176680520703792092, -3380816032184011713, 7981734128948753501, -8670001297385339389, 8062954646030300488, 2517239586915939007, -5504320849162858674, -748941474983152750, -9082845087988087595, -5022173325835989997, -4813211069981056013, -6601087015482524438, -7722340044920901900, -5325507515925589198, -1100927782630193629, 3688534045787449501, 6352995152879089204, -7886007763609165988, 5679247030582752972, -5877302605504352293, -7045994988775998586, -2832932744054271214, -6804833994737701355, 4588313801928016958, -6941466689747828147, -1372542591561129045, -6127102430891897649, 1266986145533247613, -7611105852114147441, -3271194318583501611, 4445656458793579413, -3267893043013373469, -8093144289962605489, 6711312226866341622, -8277678046225958233, -5471124307960549061, 2693696444106523331, -6317966761150988845, -5112876250310399104, 240910759591524837, -2224504219483666942, 2265509625262188961, 1961500777767502962, -2550636249589664096, -2556425520536026150, -3655950819818828870, -7338503849212161751, 8854366705924429362, -2703095148421592081, 5809573216726110377, -8844479535534404387, 2919672374193260997, -5355119116868851081, -808698167336770034, -5683410319939254564, -7728053966254131755, -321554308338178839, -5254028563173431922, 
-5670401527161978490, -4722173536066352366, 3674169688852398222, 346429055988804695, -8855237842289651735, 9179190666443757633, 2654238586148313077, 1395184011538311305, -2708010266717561229, 1743828755480579702, 977414039741861801, 205966076875104763, -1287844513302484994, -2933748614730646953, -8938238616991969394, -6331463792019562128, 7944632885637612593, 1418051825125287167, 5342651445475634501, -7572209520694646749, 7925659769543925112, -8864942667991022955, 7192438234469176110, 4594481057169778298, -6531551718812891842, -5760739604432098152, -1404300420510161174, 9164607480812074522, -2717364471356054949, -5113482633954419326, 117340873524602808, 5789067532932684910, 2618990714266893685, 9043367480778948843, -1115858282680435719, 8410120019602063252, 8061516885472884387, -1178864042443837946, -7551425371434329273, 9127226502377373181, -4777597093099029661, 4982834285722194658, -5240814302621739666, -4973297102060614458, -2203620503187152930, -1042747704300621026, 194052734340975980, -6783678541807476786, 3762002711361760270, -4821488515904397267, -3112962597699238833, 8959857448670749918, -8186761287406838796, 8691495232077878966, 6246422415631216483, -2106589669179149317, 6783483768695922674, -9177262423244478330, -2735622614601210054, -2303633313683195111, -634617141193713175, -3740268570685691303, -6719318820687373453, -6434094810291932837, -7817163317032710596, 1072237688884858910, 8866289614183460139, 8334240281357908479, -6569479099465363477, -8551716956512538089, 4916276503601011334, 7966688880226771708, 4814167529719280529, -2947721217537404512, -5599068331791260302, 4455503761316084901, 8035514833072839756, -3644356069987827142, 861936131416706149, -3496407295730452570, 666438856299200289, -7887330960071207511, -7930312745483281157, 8996422284935592496, -2637464362094685415, 1963295807992721620, 506183055255936967, -8486267892195506528, -1840185452625716241, 6286927514107493536, -4859426896457524234, -4013814098249472699, -8022971673996200656, -8088198444893073833, 2845375436600030580, 9130660735719798664, 4292700635113766901, -4504095485741292859, 3966206596265981857, 2975901021160143703, 5574203306618918646, 8217393228394836439, 7313815728697977491, 6870397133241437323, 8827246562863390654, -8349719833342334371, -74126422939027171, -1473119885607901340, -2323368074801227054, 6057241726273072849, 2623646636693387253, 3553045186968744227, 8655665086888841853, 7918735294316718763, 1026252895804151494, -2750398427883169926, 4856433880333968795, -1292139832952283552, -2847685486181773516, 465249708710914090, 7733110196415935614, 1061789591194350829, -2365697060775996605, 1781600041036203628, 8356563469774814509, -7536886277349373362, 2138955836674791931, -7040674764275647124, -5128560546883108098, -8651525993365645935, 2541396036045051070, 2404439344222466201, -5547599488360442346, -7794704919839399925, -8226191146544350863, 289465486646058643, -4586039356379674945, 7127217100863214922, -4495250890385500896, -2106160018154711765, 9080154069785344338, 655749881663331040, -6702682552319724564, 5324130608559833474, 877503991687559961, 3727570709929290521, 6992899601663041449, -7225914066341699299, -2316207447599938648, 7174424563258555041, -7457465811191816446, -2649306442980402693, -8868878908098870682, -1583682207309600514, 6498553053087084728, -722404749390123102, 6000390856805189535, 5128263968350732757, -4981757423832768691, -1268181473434665332, 2817322512396508566, 1338819931934891643, 1188215238557882101, -1861569182445514783, 8665273160893157482, -2939830410252956771, 
-2127285219058086241, -3782475126155545900, -3455096037468242984, -8813427168856428910, -703261345900217736, -5190209151846810894, 3980121013042178043, 6551715899679428068, -162289065130978122, -2355471502272886103, 2368428981596798268, -1284112795425535272, 6005771077125219092, 2236414669243453690, -4142139392564608510, -4777867591038274996, -6014848273105675073, -6534456376829602337, -5256728626462857634, 1255881738156591285, -3394422962934634376, -4425055481779486102, -4443824818135042697, 2426831793918381907, -6907374042473582476, 6048796127055447448, -2272678458132723151, -7443188793966482350, -200612787973617049, -1055209411613510662, -2240853441978538977, 2000910562936779878, 5512397444498478792, -631027022038989776, -2540887355251016282, -808330423637887153, -3516162315098149747, -7150420697154350795, 5559294588465041617, -7679953624981913529, 3206896782037043210, -5918456670342606702, -6011476978059204414, -6803448329506791224, -2262616113939482372, 1830707888245593202, 9199386494476670912, -350299155362272581, 4033750021561075398, 1002680791926630680, 9090860437261576221, 4604994701630780601, -4008874455797206566, -3327866028915473529, -920506071437672085, 3798238127114670843, -4795130095301739600, -1214663376670460156, 7430608726696333532, 2336090016735039691, -8946140186723974929, -3849818766010170695, 79541014581602760, -188873531328188448, 8746613041922083035, 705304878837552140, 3628842864019629683, 7415827732172988578, 1880372183497206813, -1517256935656670334, -7006607669285740895, 4217774392339622252, -7328157689540771777, -3066218644699038259, -7755355436888968421, -3843090512756827397, -4676908407160826479, -4820214818736427899, -1534297001989231434, 36411752805366994, -9215222536127688250, -8112601031276821418, 2377958761034761541, -7495278766186370400, 1475505809389802173, 7853991037513241052, 8771523294670357413, -4176975783583370839, -1188770377463052137, 3693645855734581458, 6289980921484865499, -2711726024261605633, 7297325280615745501, 2917084630762900166, 2890785095650628185, -128995603325835103, 3512545688570833372, -2448656501846418923, -5916311759350628061, 9152075419722155947, -3028688098867409951, -5295091602636511429, -3525077333299278263, 4832579383513600090, -131342039598520341, -6916620327440072547, -8717323215058449690, -6925300634651412351, -4359900965247354470, 1671714198797655490, -603755978140655289, 3784038107468879343, -6561346577863567480, -3054321027958942854, 1260345309311533293, 2779246438860188550, 835477891416037886, 4150782592281302317, 4412280937506307160, 7840184006017790859, -6872434087727757949, -7770550854724102191, -4369953409386784930, -5259655072217021747, 4400299496394153260, -7665312343202223959, -3707544972038554952, 817262697332247781, 4430881011755576478, -672189226962908492, -6618642457902714881, 4832246179235573335, 4047672164621728661, 3916525392544903290, 4770838978206227850, -4647283855745997483, 5897338950506164426, -6375355749125438844, 7309309088563392070, -8479453484598642071, -1952119089374192784, 7999406905047669430, -4439940153654586900, 5636115854507408296, -3141473428610892845, 1105539432035575547, -9105690327362450647, -695648314577378424, 5420164724559591054, 9070763472166017336, -5868374445833317208, 553476081533201703, 7913795938061753615, -4688903595929460745, -5539857073305646079, 43167038057121257, -1466363441044051768, -2519092808672831537, 3991159189309383169, 8832310837872120482, 5959126991231754588, -8597097047573574144, 2828851076745794726, 5713525427035350560, 2791992792474311343, 
-6297097930125299017, 7513363145356384280, 2071385878194426058, 4377640110894262018, -3721368361400972755, -8414441175147129487, 8102325290170520951, 4688146170547922648, -6589905692333763353, 9189868470910541498, -5707771806498765001, -5132100760525575660, 4677757138973245197, 7658956595395712490, 1507138694195148440, -7147108304188406963, 7826907978186266402, 3800001249914948568, -276730835667420699, 1202706585181245627, -1325208370469476773, 6424572669624800255, -5601663843412321162, 3717741139618505447, 6982228870794897780, 8659925654555816010, -3224819560801998276, 3867888072918809237, -5973207719387516691, -8185096023176729940, 1404932864299613355, 7459956593367947589, 6981595561457409967, 9007331657542170262, -2442204458685261893, 3250388655388581940, 1018934735114632083, -8308140257840773909, -8070186233110415867, 8184649812918790793, -4557775901601272354, 5470065303329478870, -7962929285096395487, 7200077033021675971, 9127010903702554806, 5408131677043108574, -481132890383505739, 8138891781274782558, 8011666213695892343, -7616897186806493090, 3992273401756926447, -5913574848960422659, -1275095834384969041, -4330722661486017123, -4677206324150955054, 4551668336842649322, -3180046863464553916, -1649707448817564953, 948859285367516762, -8094457256211466610, 3749580520931852644, -1925698536538744644, -995165141514708500, -4778308806639492156, -6764346661410059643, 4758024216976659550, 3328963773495483805, 3816591272188421653, 5973378937920177343, 5758127044465435518, 7266994269382906086, -868252797862946597, -2639809526012449670, -2098847676723778285, -7126334360551692770, -3975275484912757810, -7838613754580268033, 2085391997232037414, -181287900920203102, 5722640323409358803, -2278214274132064011, 1673169579508156439, -2465023791664275984, -4993327989307860761, 7795251976294163819, 4350637392665187776, 2799536090856922665, 1575976096585472413, 8778591182624119171, 4554602193504603256, 6982709807982444806, -1032888638177635121, 6769518835763310610, -6993814939426173584, -3033039965748881924, 4594245208519427514, 890004252967169526, 7723594045672116439, 7404873194098551006, -5119034442145647418, -1003182518129878081, -6399081939090029100, -8829766830026588374, 1506563208940262163, -2724830829342578447, 4591481065070808760, 6679141463988407722, -5786802985760821346, 880409760797981763, -412069812386492652, -8530079771529564066, 8536920726729615867, 7344551241699232348, 4169435301592550283, 5090850225354266260, 2028322986360409872, -6210090123880536313, 3383521072402114504, 6129048006105224380, 2975990317070916964, -7645253874295549615, 8159289631431511881, 7891921730348598536, 3595678784699848076, -6593163503123193873, 3603656079635134623, -2838051562593151350, -8886275486052763227, -3259815092254808853, -4553567488232902804, -9033099494055027936, -5903523352223038298, 3374582155683136625, 8408471502221115383, 6305567015122203290, -6145225106537112279, 4860960331815729383, 7393520013171056434, -2663319626848557600, 4050833979413124597, -3323919042485780634, -8938279041032250265, 7289289719937881110, -5199115945863993590, -4587717542830787873, -4999978371958265657, 8130960626679650491, 4965964696177810985, -3359156341767982490, -4210083526277966010, -4895123207485341206, 2189757105924077955, -7538821454100695732, 6203062712562811126, 3406479498849248462, -835711237859802536, -7028764017537252648, 7869657899077368054, 7318283258821558625, 4926563671458013655, 6048607130027791000, -3282434354821337263, -3177953634950956205, -994987332924249812, -5210348334399572821, 3426032161604688278, 
995906498906221947, -6669714024988763459, 5252224375617926421, 255954629787036668, 3651784133051475975, -4850336894014670084, -5898201401182364061, -6607942075213767700, -6628290181218952109, -1686354934897044465, 1041692782076064776, 6602224538654834746, -2201816748581014602, -7260064935588296345, -8754107075428156708, -1548965751505097957, -1405786728215016119, 5107058577867526961, -862625570543784867, -4904079340700075902, -6263395868212650615, -8314198251656574258, -1708638672892337706, -8146886646120949741, -682382147153973656, 7659097990590635236, -2170813151066002178, -7837314926960338269, -2264665145026274623, -8174948491890133759, 3497595378323033931, -3868917755112047077, 2196848182639524340, 8414166636468462128, -6166242497548643913, -4687429520165225083, 4769387398796788141, -8436882428240485104, -2058514188922269542, 7553135259606477757, 525193456273670869, 3972330969467081420, 2508846724140489659, -8540819034512978979, 1412809532011889035, -7297581154642276646, -1673776540874489460, -2074287685437574260, -2765283248078846152, 1334391905088544430, -3695946496330370848, -8042024345356942635, -4080828562481710756, -4047410188393025081, -765157642037170934, 3006291695337662295, 1756115464303170708, 3845401379798590070, -8580494626480357070, 8228879655024478248, 2128533814153841824, -5385456512177777838, -5212874967560540528, -6427483433146229865, 952335514680287265, -9113388660051631733, 2385958480374035348, 8095463139477987023, -3643750029265549706, 3355743840215050245, 912751109938179978, -6197968481546096776, 408579319047660064, -5191167807190741101, 4465734662737034022, -3928191454307593030, -3052072492860100646, -4170427744579550905, -8898371328647136705, 5970588749919013815, -8551490117414284394, -3106846058479917975, 5124561599786531247, 4327108214064548569, -1610757265047386093, -8613276680068600166, -7288774974886348436, -1099835576742887240, 8414515394498907642, -1500091365997267391, -6739291368308739660, -4550703355267804774, 2998389361849063221, 3192380930359444667, -336627004377843924, -2754770719772728938, 5305632681582738584, -9135806846405700403, 2072344406135774488, -9022114722560538647, 8243254759266521317, -1119620069863503210, 7501609090016826983, -3693509216496566113, 8288191370639345, -4504550336550382322, -8210087550809328470, 3560742721967676085, 1954013433170687436, 1092442872987647465, 2888334573583639589, -1114743592533466588, 1939713077541513251, -1584023156647382383, 5535789762266979811, -62128766896641644, -7083063174005035986, 8287731713865936126, 6523236334449194078, -7039783060145433652, 5037382359481537664, -4893178415256303052, -1515963066985392875, 7424658220157821643, -9003136366762412184, -6276987399875132881, -5078669836117968058, -8441563256467901412, -2459860432371550823, -6264272347267916581, -6481671433081154986, 6307095258548369991, -4641860270615800507, 4624554977014401453, -2350355296822578495, 2510742027157153802, -6104449646414899547, 7562856789673740616, -4461217512017847949, -3812612882360808506, -9102566207241473149, -2594783517616205092, -5630870553073190812, 8353023384051112751, 1791383190077649364, -8224630664573163275, 5087547422763015911, 2963115828419570640, -907504797900049454, -704571147854297764, -1035659828219395442, -3856518662761818106, -7293579210702096453, 2149907909040905346, 3308624336099905741, -2148933506497740479, 2375999919931314098, 1758519656997109601, 8487402453528486293, 8030750879900448221, 6713132201032781377, 2586918570300352837, -8390945511729247080, -8336370443388921627, -5680610584175813140, 
3653722942763612730, -5185789366666774222, -241298443444894920, -5686775890105702236, 3675379031456686704, 5245103201722754669, 1187064011119656069, -6026678283399851221, 8591049199382709543, -958672431593447782, -702210303099315415, 6141700644167946529, 1269559835664386985, -7939339184526746570, -8206755691091464815, 1573762622811466348, 5042764193538399363, 6041686017358941696, -2204563597664378350, -3517953027551930083, 3450386990274712967, 5410516131045897426, -9168652629909563711, -3985581474541557365, -8926914702363749930, 1186520803665066925, 1406314913753154854, 8868870948674943529, 6880621071822169716, 5370316171401507943, 4412636705270629114, -5078083351531110766, 3444286606158608256, -8205752318338941515, 8547429608762811094, -6519966311034112113, -1413738022713835115, 5568569149610584750, 6036057437471586938, -4876856011049610728, 272429004485342079, -3453630842847634073, -1366095897731051074, 6125434214091636094, 2447320816512245255, -4613841215935115212, 1973419128161707218, 2364074815516185345, 327408451321196934, -4000821832372194325, -8173680808835518958, 1201684883987025461, 8283120638577463370, 6101343157291948849, 429137980701789403, 5953695369455659065, 225280172101699889, 8314353213185338142, -4153225850753091018, -2709351338879280173, 1333874750942894183, 3814703919906217381, -5262762653525349848, 6299969758847514665, -8620361299054446823, 3604599371704958244, 3286435348100780846, -148032240776399836, -6255252299693425683, 7744566125419552833, -9010095641685704853, -4051149875843384055, 1985802142813691565, 7734900020502299602, -3146935134195403429, -664596930311411291, -8769750433047880757, 4115748344452239762, 6579250046577308254, 230166954229289470, -2186474838868342008, 2822967420440860767, 3147759382065818401, 3604307846749968280, -5635678801840124930, -6480267541227441758, -8891785863539658148, 7624519770609622105, 4703064168443047462, 1503528009344243625, 2435730275994705699, -704612467710177938, 2716095510627024148, -5843705757092672559, -2127555302623279870, -7234615000952315167, 7327116537692662525, 976668274983827506, -1314056675469985361, -1084313373553359708, -3650905684498692619, -6148081834803737657, 7255448140949764860, 8414351741937111009, -5638941544192975342, -2000465179005686320, -7485612095154483020, 8933096104173943115, 3261780456138624565, 7203649271822671010, 3506662135314247672, -537067434385873329, 3993807761669762677, -2134215520595502171, 8321346744225053782, -915369335731016548, 5305181545836058879, -6449071113337142368, -7022450482680652886, -2839061872534347024, 8458271966978519952, 628772202173139052, 3955845515454987501, -5499542328839725888, -2035890135937295839, -1656074730486034531, -3738055661363781729, 4196346733537661626, -8994226689590814797, 7596099991109974512, 1408362957440871854, -3866453708520576835, 8669336497088992549, 1656305394858550182, 6665970590884194612, 3502684361668034129, -7236218742708433379, 1107639571211408928, 5155559334094600917, 5813987500346762357, -590364672163616948, 2135787635699137048, -5784621874338921853, 1739885765379029707, 5333254638660743049, 8227359479550096228, -9065061183636866169, 5966087696944371172, 6389037595171397903, 110402398408378349, 7329899675214643004, 2488037190780244359, 6213028525105682913, -6117557608858445440, 343605177114228186, 8136677684121418114, -417490750695207169, 224224927074988228, -7094521059363225253, 2021915242457765263, 9090683643352429941, 3165893283489796017, -5934554565919493180, -2490288551863293808, 6563412384396215601, -6875917325096992491, 
5621790985801756235, -6441325051668825321, 1366467738961958365, -8037118782812102025, -2427863120940900369, -8835101525611276065, -897172911957976484, 7783534909683580425, -263807469835089121, 4065461948013132849, 6996315371396975761, 7637287758881127543, 6695518419213077666, 2059266273533652518, -7992378192466304104, -6276800368280142747, -987512944725400453, 1910779544435837952, 9074505704246826946, 5352404011797008706, 6533039265160905955, -9158479616074918744, 1783963500517883761, -7161933608211998183, 7149342488900266936, -2593262749889837216, 496188197202188252, -2633705999341305830, -7775600982513957221, 7899680229291824353, 3241510855281747185, 7720806757754290304, 8682849314703587995, -4085004815922627638, -8567866779175520279, -3001917968050400346, -7623487713366994140, -4718817841495528428, -8449730589296840345, 8652914565619576994, -7994068511067316015, 8846402259591257133, 7859951591356053817, -6100791520249080859, 4964344810086147690, -6206703396161747541, -1375612159603889656, 7486909632652090100, -6878258508361286187, 8521895020694687566, -8416065046502516572, 8076219769140527430, -1823155786313397330, -4473047795290192463, 6668697193101367703, 8509975182239814129, -1478692622612746293, 8198942719100662433, 5107500491173591915, 6609231621514050640, -8263234616162487594, 4717192723999904491, 3799574504967543729, -1341338053828588847, -5224431820516331518, 1216188210939342681, -7276470454754780658, -3322364408352331894, -2918315472886957071, 3165028934260615970, -2179059539457574700, 7111534941968179162, -2434404158709588901, 8594721941716072641, -2954711236751602282, -5700917256772405072, -3738516822917210994, -8465575782722614381, 8218284380678431987, 7457336109082397355, -5417610049159808277, 7103474311034728052, -4495500858058223882, -4398464591926490909, -6732388740653723591, -2351117218859064388, -1997475470120132838, 2805922364843633553, -6280448650347592812, 6040382867952112743, -8796146879572776253, 3957035111875251141, 1943390719552704411, 8095298154038699052, 905892709596785742, 7387530507337232273, 1471566453465022937, -4238299490036952274, 2663728718258422865, -2095154789368081238, 8278508703587099104, 213173026306416666, -1158119173466182257, 4393463925763154191, -5447059785870029882, 4746206872858617737, -983454241847368651, -6500663502557911036, -3679647868275408801, -7467093973893498269, -2343231552771025415, -4183041282540092407, 6942861801273605268, -4036454758812800153, 603971093762417862, 1625680607207229133, 2929551610498656197, -6064094773108945266, -2217219098691948623, -6101130806572644074, 4664961822075530361, 4748217794657020608, -5002928435288812341, 8222730668889634564, -2613229330943868561, 1237524241967177554, 2551706092164757755, -657774971470229283, 4630071886577725843, 137990046861394566, 9160273236537629574, 3334824062514757211, -1566494690770614800, 5143166668841154605, 2399924168480321003, 9125751201843434699, 3368820143402714347, -7101468258053763462, -8755238090442733943, 5436301567799969930, -3552208790436263977, 74844008617913713, -7062686209290305241, 256647045990534262, -7391604887291937843, 4192351853313259083, 2654277798521408102, -8054547589655036110, 5678926290872028773, -8717581728414501617, -6040227945712834742, 1337104785484137491, 461697261221293724, -3162894557815043135, 4018761301976443232, -1253277245645150037, -4913513448897983920, 6037499056727797782, 401319307208696262, -6461479463066486323, -5723467426488078172, 7279138455375778844, -6579088669040951495, -3177892045390829821, 4565661226296611136]), 42), 42)),) + diff 
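The new reference file below, test/reference64/debug_parallel_spmms_no_atomics.txt, records the results and the generated code for sparse-times-sparse matrix multiply under schedules whose parallel index selects disjoint slices of the output, so the kernels need no atomic updates. A minimal sketch of the pattern the file exercises follows; the 4×4 size, the 0.5 density, and the SparseCOO{2} input format are illustrative stand-ins for the 42×42 fixtures actually printed in the transcript (the `ptr`/`tbl[1]`/`tbl[2]` fields unpacked by the generated code are the SparseCOO{2} layout):

    using Finch
    # hypothetical small inputs; the reference file uses 42x42 tensors
    A = Tensor(SparseCOO{2}(Element(0.0)), fsprand(4, 4, 0.5))
    B = Tensor(SparseCOO{2}(Element(0.0)), fsprand(4, 4, 0.5))
    Ct = Tensor(Dense(Dense(Element(0.0))))
    @finch begin
        Ct .= 0
        for i = parallel(_)   # threads get disjoint ranges of i,
            for j = _         # hence disjoint slices of Ct
                for k = _
                    Ct[i, j] += A[i, k] * B[k, j]
                end
            end
        end
    end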
--git a/test/reference64/debug_parallel_spmms_no_atomics.txt b/test/reference64/debug_parallel_spmms_no_atomics.txt new file mode 100644 index 000000000..d33ab2f46 --- /dev/null +++ b/test/reference64/debug_parallel_spmms_no_atomics.txt @@ -0,0 +1,1317 @@ +julia> @finch begin + CR .= 0 + for i = _ + for j = _ + for k = _ + CR[i, j] += A[i, k] * B[k, j] + end + end + end + end +(CR = Tensor(Dense{Int64}(Dense{Int64}(Element{0.0, Float64, Int64}([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.70856859734722, 0.0, 0.0, 0.0, 0.0, 0.40683416426697433, 0.0, 0.0, 0.5116135014596547, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11665081191597346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6815260724911955, 0.0, 0.012792585308225086, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23414421666297028, 0.0, 0.049360715516608075, 0.0, 0.17114666211685992, 0.0, 0.0, 0.016462013473269064, 0.10889740119368015, 0.0, 0.0, 0.0, 0.00577445736315422, 0.03699291283586617, 0.0, 0.054643694670994745, 0.0, 0.0, 0.0, 0.08011353665096754, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1586425298490503, 0.5528778968703049, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22133004329744424, 0.198518145547811, 0.038821202773921835, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18278114382714625, 0.1962641247777959, 0.33892974018746314, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.013014677888550407, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6923776912647246, 0.0, 0.0, 0.0, 0.0, 0.03372992996786747, 0.0, 0.0, 0.0, 0.16613962476105457, 0.0, 0.21040813690603985, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12848588404681333, 0.44877210624067243, 0.0, 0.0, 0.00922143015221846, 0.0, 0.334367843457963, 0.0, 0.3344728737871945, 0.0, 0.007609184274463018, 0.0, 0.0783652603407444, 0.0, 0.008880000989208168, 0.6489339658812214, 0.0, 0.0, 0.4325454998052285, 0.24409841671258664, 0.0, 0.5857475361200408, 0.0, 0.006543890265716072, 0.0, 0.0, 0.0, 0.008425346810589148, 0.0, 0.0, 0.16622566484891482, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27228959440031625, 0.16863240701914226, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16867493346874554, 0.42330165278515836, 0.0, 0.0, 0.6457463733709866, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12409061480277174, 0.0, 0.14869580809977814, 0.0, 0.0, 0.0, 0.0, 0.14872661472502066, 0.5062551989716859, 0.06509123699478471, 0.0, 0.026886171078575067, 0.0, 0.3503937118835625, 0.0, 0.13397275360612415, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07224002362266076, 0.0, 0.0, 0.014356019852976252, 0.2658719803791635, 0.13166323290951196, 0.0, 0.0, 0.0, 0.0, 0.020308821426184575, 0.025635708510132263, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007980130035150811, 0.4090538905999397, 0.0, 0.0, 0.008918289569362796, 0.31893009912431536, 0.0, 0.0, 0.0, 0.009525001505142072, 0.0, 0.0, 0.013278407346240287, 0.021967736559129595, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12136922915980554, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.279484838006678, 0.2506790805406071, 0.0, 0.0, 0.10182245842840729, 0.0, 0.0, 0.5745756063332743, 0.0, 0.5180568530499269, 0.0, 0.19018853437509187, 0.2984452378932974, 0.0, 0.0, 0.0, 0.2447378349547936, 0.0, 0.48522550177156104, 0.5906428332019245, 0.0, 0.0, 0.756905839792281, 0.0, 0.4128008966782496, 0.7173389246941403, 0.0, 0.4558370427008323, 0.6596592042653375, 0.33014948101925834, 0.0, 0.0, 0.0, 0.0, 0.17629318543732997, 0.5676926328184286, 0.6751757284159318, 0.18305092512120225, 0.0, 
0.5526511646802545, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6816784313112039, 0.0, 0.0, 0.0, 0.0, 0.4384965381282249, 0.0, 0.5249476708016299, 0.5514304573066787, 0.0, 0.0, 0.0278361825874964, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.38619300372291254, 0.0, 0.5997636810868653, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20257599957103595, 0.0, 0.10844521538627999, 0.0, 1.2226089817686585, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04229463432907405, 0.0, 0.0, 0.0, 0.0, 0.19946965003853376, 0.048044013732540566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.036066754029278635, 0.0, 0.16231330637401103, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6062852141104628, 0.40481276616279827, 0.0, 0.0, 0.2539845497787725, 0.0, 0.0, 0.5691049256163461, 0.572636917198234, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.026986118300396385, 0.0, 0.0, 0.2578776387682179, 0.2201878278780288, 0.3521973225117564, 0.49204059164875263, 0.20695213801377368, 0.3889153033164169, 0.12024801465448598, 0.5015222202004627, 0.0, 0.0, 0.0, 0.25511838171251733, 0.3395573400007252, 0.0, 0.14076018335068016, 0.006079069440287111, 0.3120090695218348, 0.02735798901473654, 0.23564550369089346, 0.0, 0.5201790908279286, 0.42693711336544043, 0.0, 0.0, 0.32942094250755716, 0.0, 0.0, 0.0, 0.0, 0.42208957099917227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27253622974111436, 0.20091824181940837, 0.0, 0.0, 0.014451665708310772, 0.0648234167882922, 0.0, 0.0, 0.0, 0.0, 0.43998592755822313, 0.200499194569837, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10517105271620596, 0.4525079677294799, 0.0, 0.0, 0.0, 0.3992933674102322, 0.0, 0.6336315500314595, 0.5683248347075297, 0.0, 0.03911423543998137, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5618719435725092, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13717215169360888, 0.6023639264726295, 0.0, 0.16451246906039718, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2428824655720991, 0.0, 0.0, 0.0, 0.1307489669067023, 0.016490708613327746, 0.0, 0.5332965109179645, 0.0, 0.72835164827528, 0.5456771996801212, 0.20306669911566266, 0.8042852233793947, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08986996958215537, 0.0, 0.09372773893140769, 0.2910950907401526, 0.0, 0.0, 0.11400417325056827, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15073719147466486, 0.0, 0.0062702773417321524, 0.0, 0.11018067217476075, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03517847753788876, 0.0, 0.0, 0.0, 0.0, 0.0, 0.012565073058016527, 0.0, 0.013511533493035587, 0.0, 0.0, 0.0, 0.1160032767395455, 0.3288776375406247, 0.009384967607632352, 0.013704851364696426, 0.0, 0.0, 0.0, 0.10679904950368906, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5396299966121684, 0.0, 0.0, 0.0, 0.4120885163537877, 0.0, 0.0, 0.0, 0.0, 0.1374333138690627, 0.0, 0.0, 0.0, 0.0, 0.0, 0.308835769631579, 0.0, 0.0, 0.2935183046180839, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21641442424094412, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16667427577804808, 0.0, 0.0, 0.0, 0.0, 0.1072149118509185, 0.0, 0.0, 0.13482790109225204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32702264908263307, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17999984836884408, 0.0, 0.0, 0.0, 0.0, 0.17960575635997003, 0.0, 0.0, 
0.0, 0.0, 0.08691531027429766, 0.0, 0.2893303365574358, 0.0, 0.21730094701941294, 0.07940443022930238, 0.0, 0.0, 0.01060094130869171, 0.0, 0.0, 0.21897146914670435, 0.3193413479843402, 0.11502306512986679, 0.16078084781731777, 0.0, 0.0, 0.029138065953053547, 0.12899773650646262, 0.0, 0.0, 0.0, 0.0, 0.35209003333845906, 0.0, 0.0, 0.42881832168904727, 0.0, 0.07551665376676242, 0.0, 0.10828996414360535, 0.0, 0.0, 0.0, 0.06175478190339224, 0.07317612880730584, 0.0, 0.4573326969055594, 0.13616590004764864, 0.1733951270114995, 0.0, 0.0, 0.0, 0.0, 0.004682414366264115, 0.0, 0.0, 0.014457116882680743, 0.477538324169661, 0.6690522150262886, 0.0, 0.0, 0.35074798851029604, 0.0, 0.6171788496488323, 0.0, 0.0, 0.0, 0.4598571719367441, 0.020424541646230095, 0.5014147387061915, 0.0, 0.0, 0.18767693158032267, 0.0, 0.08703683411330847, 0.0, 0.3388800152965881, 0.004390191394809013, 0.2131890539109787, 0.0, 0.43631258167790793, 0.0, 0.0, 0.0, 0.0, 0.011243656021000366, 0.0, 0.0, 0.004302223899340062, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3252052607129126, 0.10943362605720088, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4600533876598886, 0.0, 0.0, 0.0, 0.6238654988124779, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17327928789102656, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3007942289792172, 0.49763260068933673, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1811653464990684, 0.0, 0.7596663692737102, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24115083978451807, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4132016125575898, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.30212649550849624, 0.45064666528381636, 0.0, 0.0, 1.0295831107021076, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4924133218118872, 0.0, 0.0, 0.6123145833841932, 1.0074416855456372, 0.8974627446983252, 0.0, 0.0, 0.39387754214687604, 0.6443411524082342, 0.0, 0.6700619016074552, 0.0, 0.0, 0.4377096051189058, 0.3977735630344448, 0.0, 0.0, 0.0, 0.054395363429349475, 0.46912699721752377, 0.09054692381782561, 0.0, 0.6320539215802606, 0.33393006705956935, 0.614294006786966, 0.0, 0.0, 0.3986581318369477, 0.630705395059908, 0.0, 0.4266820903955834, 0.6230830913144384, 0.0, 1.0831760350408295, 0.21088758697054613, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7135848094649876, 0.06891111903866448, 0.0, 0.0, 0.0, 0.32920907856114856, 0.5438621275845595, 0.0, 0.4139962279186794, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13809169802867077, 0.0, 0.14849341459532064, 0.0, 0.0, 0.0, 0.5533499952652239, 0.0, 0.10314194807289108, 0.15061800177708015, 0.0, 0.5514890096422472, 0.23190068111070972, 0.0, 0.701822721136001, 0.6294877235260824, 0.0, 0.0, 0.0, 1.1059687974440209, 0.0, 0.0, 0.0, 0.96169924491107, 0.0, 0.0, 0.0, 0.0, 0.06628312350990566, 0.0, 0.37743722057381907, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08765206247905213, 0.0, 0.5218343219200703, 0.0, 0.0, 0.0, 0.41461579287108996, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6671900885778136, 0.0, 0.0, 0.0, 0.028262864260027967, 0.7909490968152504, 0.04511006432931489, 0.0, 0.0, 0.0, 0.4564818142302977, 0.35113439772465294, 0.0, 0.0, 0.0, 0.03776705986522304, 0.6129906961879193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42034814190539166, 0.0, 0.0, 0.21638060933821013, 0.0, 0.0, 0.0, 0.2526638526129318, 0.03715346614099295, 0.6358424325058142, 0.0, 0.0, 0.04152129953915335, 0.04614236094065058, 0.0, 0.0, 0.07015688762319212, 0.05740172594394938, 0.18351207696535793, 0.0, 0.0, 0.008864400691370657, 0.0, 0.8651249823989057, 0.407678395491645, 0.845679782901505, 0.0, 0.0, 0.0, 0.0, 0.4753420456230014, 1.6107190368773618, 0.0, 0.0, 
0.0638102435424578, 0.24041718723542044, 0.5177820553003522, 0.44619181405158337, 0.4489609795607919, 0.0, 0.07301898938869086, 0.0902695074635508, 0.4268753686592877, 0.0, 0.0, 0.0, 0.16408594940133228, 0.0, 0.0, 0.6736439849461895, 0.031382854877976686, 0.034000045192845295, 0.0, 0.0, 0.8630676095443404, 0.3932053636261741, 0.0, 0.0, 0.0, 0.23747714078691665, 0.2179752640521007, 0.0, 0.059020425946533776, 0.23507519209688904, 0.0, 0.6184935143504988, 0.18475168643298573, 0.3255150538360457, 0.0, 0.0, 0.10559996237493433, 0.258855010743052, 0.0, 0.0, 0.0, 0.0, 0.5931453366617909, 0.0, 0.012879882724963071, 0.34456425817306313, 0.0, 0.10363371305008902, 0.0, 0.0, 0.13032437159832005, 0.0, 0.0, 0.9136517678658531, 0.11645026631438779, 0.0, 0.0, 1.006195673838974, 0.0, 0.49183662858528426, 0.5242175629103283, 0.0, 0.0, 0.5842253265103485, 0.09175087532086124, 0.0, 0.0, 0.6346229344595797, 0.8483440750859271, 0.0, 0.0, 0.43168828228976236, 1.1746392457903614, 0.9786548179849323, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22466432888463708, 0.0, 0.0, 0.23383689086971213, 0.07868759200794351, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5047067971651431, 0.0, 0.04466167239654772, 0.0, 0.18031282035336027, 0.06880056169195574, 0.0, 0.039760598424459205, 0.2052193360923158, 0.0, 0.1245954319549744, 0.0, 0.0, 0.3371869924989234, 0.10076818165067654, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21628428501390357, 0.41878760793751535, 0.0, 0.2743772579349106, 0.0, 0.0, 0.0, 0.23737336558608516, 0.0, 0.5609891075534331, 0.0, 0.0, 0.0, 0.0, 0.11206304892649907, 0.03770992438488979, 0.0, 0.0, 0.17383517173220803, 0.252655932334268, 0.0, 0.1585306005724301, 0.0, 0.1865298137691172, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05971061253533409, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15913149160221682, 0.0, 0.4030910369398146, 0.0, 0.32022434202130456, 0.23687245172165972, 0.0, 0.3599325265793679, 0.0, 0.4765964362382492, 0.0, 0.0, 0.0, 0.0, 0.6107120779202261, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.573260313114729, 0.5768180939544553, 0.0, 0.17499066339826644, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7201822598869417, 0.0, 0.0, 0.0, 0.0, 0.08725982492406978, 0.5051841445138487, 0.0, 0.0, 0.0, 0.2569811590076317, 0.3964028116744446, 0.0, 0.0, 0.0, 0.0, 0.0, 0.31198143094865527, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42812611727229755, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.40187139064927635, 0.40436549376609193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5533450612442088, 0.3541481069684905, 0.0, 0.0, 0.0, 0.1801509251180122, 0.0, 0.0, 0.0, 0.08899148634361333, 0.0, 0.4004935508153472, 0.1664002225353232, 0.6660561411338942, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5274372110974553, 0.0, 0.0, 0.11745018036413006, 0.03952272816910389, 0.654574176319042, 0.0, 0.0, 0.0, 0.0, 0.29642163335908794, 0.1274499525156547, 0.03345490004078419, 0.0, 0.0, 0.07653915037041417, 0.0, 0.02978363268713748, 0.15372448023095572, 0.03967384765850083, 0.06258104057589436, 0.0, 0.0, 0.044337981986606795, 0.07548283043416787, 0.0, 0.0, 0.0, 0.04735429836324166, 0.0, 0.0, 0.10863396357319674, 0.1797235339241024, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2053113311622715, 0.0, 0.0, 0.6339058613302375, 0.26881844559242324, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.047384735026192754, 0.0, 0.09394657688969027, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.41199723535819494, 0.0, 0.0, 0.19249813639294353, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4930042077083581, 0.01569004651529754, 0.0, 0.1886409972802957, 0.0, 0.6504613027483894, 0.0, 0.0, 0.0, 0.4491308783325726, 0.0, 0.710600063326777, 0.0, 0.0, 0.0, 0.0, 
0.6392481732081897, 0.0, 0.0, 0.21075790790635565, 0.143953089400151, 0.04517962123405222, 0.12446587761719602, 0.011602660433066613, 0.0, 0.021141064793252704, 0.017873704948355638, 0.0, 0.010329409925325224, 0.21730544865000528, 0.03874493610862431, 0.0, 0.0, 0.0, 0.04329986579681618, 0.02617857620219232, 0.0, 0.0, 0.14198488087669728, 0.04624555904799935, 0.33624630483183143, 0.0, 0.18065873234459212, 0.001167765010724762, 0.0, 0.41156050949339695, 0.06138103351330629, 0.0, 0.0, 0.0, 0.0, 0.0, 0.45223330011890606, 0.0, 0.42719481956226096, 0.0, 0.09390028378745956, 0.03159803909732349, 0.0, 0.0, 0.0, 0.14464053921535813, 0.0, 0.355579453624945, 0.27582314245244144, 0.0, 0.0, 0.4779189904559178, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6540512683692249, 0.16386729047093346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08595593354654336, 0.0868518036911852, 0.1436872279500763, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08976911428083846, 0.0, 0.0, 0.0, 0.06561633035108058, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0209499775034313, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06082234101182034, 0.19585779687130894, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01528743323594608, 0.0, 0.0, 0.0, 0.011174280569280867, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3922416239543676, 0.0, 0.5308327922857218, 0.38811106643298066, 0.0, 0.5861742359361938, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.010357877371534024, 0.03335404406511716, 0.2121541431315042, 0.0, 0.0, 0.0, 0.0, 0.0, 0.022181366862053645, 0.0, 0.0, 0.3232792127186002, 0.21780290259329063, 0.0, 0.0, 0.6724740217065875, 0.33454771410797746, 0.0, 0.0, 0.0, 0.0, 0.1517011050211833, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20421012621580428, 0.0, 0.0, 0.0, 0.09554663093280256, 0.0, 0.9066194809744413, 0.0, 0.6530356828494477, 0.0, 0.0, 0.4162310356447254, 0.2004622291406719, 0.0, 0.06531630990369706, 0.0, 0.0, 0.22516333873266767, 0.08751639468488193, 0.31273858097846424, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10565952264223451, 0.25213753900728575, 0.0, 0.13605306731422323, 0.0, 0.0, 0.20467409186458246, 0.0, 0.0, 0.0, 0.14062561725182204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06654804138060808, 0.04780210989364991, 0.3312858803709398, 0.0, 0.09056986573031543, 0.0, 0.0, 0.22425218794100035, 0.6978010919630194, 0.5644822411311934, 0.0, 0.0, 0.0, 0.0, 0.25245963204466193, 0.5714387205668741, 0.0, 0.7794778768245345, 0.5229243836029062, 0.6068223768633771, 0.0, 0.0, 0.3439063861094934, 0.0, 0.5825998753106167, 0.07738278703840655, 0.0, 0.0, 0.45088731313609587, 0.020026145653267647, 0.49371220470576765, 0.0, 0.1967497337446331, 0.9333687888834381, 0.2687116640308751, 0.3109501951809905, 0.0, 0.628995837333548, 0.6431918870734405, 0.0, 0.0, 0.4278019777568812, 0.0, 0.0, 0.3426634565816645, 0.0, 0.7642079896603969, 0.15720215492969136, 0.0, 0.5627301734522071, 0.0, 0.0, 0.03992028432745955, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8902694418730629, 0.24785210085431258, 0.0, 0.0, 0.0, 0.19750384789873962, 0.0, 0.0, 0.24837057467209334, 0.0, 0.0, 0.06196917660673224, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9196979517908253, 0.47523150409994275, 0.0, 0.05084636736867684, 0.30281629383729847, 0.0, 0.0, 0.0, 0.0, 0.7797375130571516, 0.0, 0.0, 0.009193696508787878, 0.7164774465476489, 0.37223214885126943, 0.36077619715591613]), 42), 42)),) +julia> @finch_code begin + Ct .= 0 + for i = parallel(_) + for j = _ + for k = _ + Ct[i, j] += A[i, k] * B[k, j] + end + end + end + end +quote + Ct_lvl = 
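+ # note (annotation, not part of the emitted code): the quote below unpacks the
+ # level fields of Ct, A, and B (A and B are SparseCOO{2}: a ptr array plus one
+ # coordinate table per dimension), checks that the contraction dimensions agree,
+ # stages every buffer with Finch.moveto onto CPU(Threads.nthreads()), and only
+ # then enters the Threads.@threads loop over row phases.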
((ex.bodies[1]).bodies[1]).tns.bind.lvl + Ct_lvl_2 = Ct_lvl.lvl + Ct_lvl_3 = Ct_lvl_2.lvl + Ct_lvl_2_val = Ct_lvl_2.lvl.val + A_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl + A_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.ptr + A_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[1] + A_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[2] + A_lvl_val = A_lvl.lvl.val + B_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl + B_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.ptr + B_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[1] + B_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[2] + B_lvl_val = B_lvl.lvl.val + B_lvl.shape[1] == A_lvl.shape[2] || throw(DimensionMismatch("mismatched dimension limits ($(B_lvl.shape[1]) != $(A_lvl.shape[2]))")) + @warn "Performance Warning: non-concordant traversal of Ct[i, j] (hint: most arrays prefer column major or first index fast, run in fast mode to ignore this warning)" + @warn "Performance Warning: non-concordant traversal of A[i, k] (hint: most arrays prefer column major or first index fast, run in fast mode to ignore this warning)" + result = nothing + pos_stop = A_lvl.shape[1] * B_lvl.shape[2] + Finch.resize_if_smaller!(Ct_lvl_2_val, pos_stop) + Finch.fill_range!(Ct_lvl_2_val, 0.0, 1, pos_stop) + val = Ct_lvl_2_val + Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads())) + B_lvl_ptr = (Finch).moveto(B_lvl_ptr, CPU(Threads.nthreads())) + B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads())) + B_lvl_tbl2 = (Finch).moveto(B_lvl_tbl2, CPU(Threads.nthreads())) + B_lvl_val = (Finch).moveto(B_lvl_val, CPU(Threads.nthreads())) + A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads())) + A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads())) + A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads())) + A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads())) + Threads.@threads for i_4 = 1:Threads.nthreads() + phase_start_2 = max(1, 1 + fld(A_lvl.shape[1] * (i_4 + -1), Threads.nthreads())) + phase_stop_2 = min(A_lvl.shape[1], fld(A_lvl.shape[1] * i_4, Threads.nthreads())) + if phase_stop_2 >= phase_start_2 + for i_7 = phase_start_2:phase_stop_2 + B_lvl_q = B_lvl_ptr[1] + B_lvl_q_stop = B_lvl_ptr[1 + 1] + if B_lvl_q < B_lvl_q_stop + B_lvl_i_stop = B_lvl_tbl2[B_lvl_q_stop - 1] + else + B_lvl_i_stop = 0 + end + phase_stop_3 = min(B_lvl.shape[2], B_lvl_i_stop) + if phase_stop_3 >= 1 + if B_lvl_tbl2[B_lvl_q] < 1 + B_lvl_q = Finch.scansearch(B_lvl_tbl2, 1, B_lvl_q, B_lvl_q_stop - 1) + end + while true + B_lvl_i = B_lvl_tbl2[B_lvl_q] + B_lvl_q_step = B_lvl_q + if B_lvl_tbl2[B_lvl_q] == B_lvl_i + B_lvl_q_step = Finch.scansearch(B_lvl_tbl2, B_lvl_i + 1, B_lvl_q, B_lvl_q_stop - 1) + end + if B_lvl_i < phase_stop_3 + Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + B_lvl_i + Ct_lvl_2_q = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_7 + A_lvl_q = A_lvl_ptr[1] + A_lvl_q_stop = A_lvl_ptr[1 + 1] + if A_lvl_q < A_lvl_q_stop + A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1] + else + A_lvl_i_stop = 0 + end + B_lvl_q_2 = B_lvl_q + if B_lvl_q < B_lvl_q_step + B_lvl_i_stop_2 = B_lvl_tbl1[B_lvl_q_step - 1] + else + B_lvl_i_stop_2 = 0 + end + phase_stop_5 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_2) + if phase_stop_5 >= 1 + k = 1 + if A_lvl_tbl2[A_lvl_q] < 1 + A_lvl_q = 
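+ # note: Threads.@threads above runs over thread ids, and each thread t of
+ # T = Threads.nthreads() takes the contiguous row block
+ #     fld(n*(t-1), T) + 1 : fld(n*t, T),   with n = A_lvl.shape[1].
+ # For example, n = 10 rows on T = 4 threads gives 1:2, 3:5, 6:7, 8:10 --
+ # disjoint and exhaustive, so each thread owns its slice of Ct_lvl_2_val and,
+ # as the file name says, no atomics are needed.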
Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1) + end + if B_lvl_tbl1[B_lvl_q] < 1 + B_lvl_q_2 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1) + end + while k <= phase_stop_5 + A_lvl_i = A_lvl_tbl2[A_lvl_q] + A_lvl_q_step = A_lvl_q + if A_lvl_tbl2[A_lvl_q] == A_lvl_i + A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1) + end + B_lvl_i_2 = B_lvl_tbl1[B_lvl_q_2] + phase_stop_6 = min(B_lvl_i_2, phase_stop_5, A_lvl_i) + if A_lvl_i == phase_stop_6 && B_lvl_i_2 == phase_stop_6 + B_lvl_2_val = B_lvl_val[B_lvl_q_2] + A_lvl_q_2 = A_lvl_q + if A_lvl_q < A_lvl_q_step + A_lvl_i_stop_2 = A_lvl_tbl1[A_lvl_q_step - 1] + else + A_lvl_i_stop_2 = 0 + end + phase_stop_7 = min(i_7, A_lvl_i_stop_2) + if phase_stop_7 >= i_7 + if A_lvl_tbl1[A_lvl_q] < i_7 + A_lvl_q_2 = Finch.scansearch(A_lvl_tbl1, i_7, A_lvl_q, A_lvl_q_step - 1) + end + while true + A_lvl_i_2 = A_lvl_tbl1[A_lvl_q_2] + if A_lvl_i_2 < phase_stop_7 + A_lvl_2_val = A_lvl_val[A_lvl_q_2] + Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val + A_lvl_q_2 += 1 + else + phase_stop_9 = min(A_lvl_i_2, phase_stop_7) + if A_lvl_i_2 == phase_stop_9 + A_lvl_2_val = A_lvl_val[A_lvl_q_2] + Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val + A_lvl_q_2 += 1 + end + break + end + end + end + A_lvl_q = A_lvl_q_step + B_lvl_q_2 += 1 + elseif B_lvl_i_2 == phase_stop_6 + B_lvl_q_2 += 1 + elseif A_lvl_i == phase_stop_6 + A_lvl_q = A_lvl_q_step + end + k = phase_stop_6 + 1 + end + end + B_lvl_q = B_lvl_q_step + else + phase_stop_14 = min(B_lvl_i, phase_stop_3) + if B_lvl_i == phase_stop_14 + Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + phase_stop_14 + Ct_lvl_2_q_2 = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_7 + A_lvl_q = A_lvl_ptr[1] + A_lvl_q_stop = A_lvl_ptr[1 + 1] + if A_lvl_q < A_lvl_q_stop + A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1] + else + A_lvl_i_stop = 0 + end + B_lvl_q_2 = B_lvl_q + if B_lvl_q < B_lvl_q_step + B_lvl_i_stop_2 = B_lvl_tbl1[B_lvl_q_step - 1] + else + B_lvl_i_stop_2 = 0 + end + phase_stop_15 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_2) + if phase_stop_15 >= 1 + k = 1 + if A_lvl_tbl2[A_lvl_q] < 1 + A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1) + end + if B_lvl_tbl1[B_lvl_q] < 1 + B_lvl_q_2 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1) + end + while k <= phase_stop_15 + A_lvl_i = A_lvl_tbl2[A_lvl_q] + A_lvl_q_step = A_lvl_q + if A_lvl_tbl2[A_lvl_q] == A_lvl_i + A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1) + end + B_lvl_i_2 = B_lvl_tbl1[B_lvl_q_2] + phase_stop_16 = min(B_lvl_i_2, A_lvl_i, phase_stop_15) + if A_lvl_i == phase_stop_16 && B_lvl_i_2 == phase_stop_16 + B_lvl_2_val_3 = B_lvl_val[B_lvl_q_2] + A_lvl_q_4 = A_lvl_q + if A_lvl_q < A_lvl_q_step + A_lvl_i_stop_4 = A_lvl_tbl1[A_lvl_q_step - 1] + else + A_lvl_i_stop_4 = 0 + end + phase_stop_17 = min(i_7, A_lvl_i_stop_4) + if phase_stop_17 >= i_7 + if A_lvl_tbl1[A_lvl_q] < i_7 + A_lvl_q_4 = Finch.scansearch(A_lvl_tbl1, i_7, A_lvl_q, A_lvl_q_step - 1) + end + while true + A_lvl_i_4 = A_lvl_tbl1[A_lvl_q_4] + if A_lvl_i_4 < phase_stop_17 + A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4] + Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2 + A_lvl_q_4 += 1 + else + phase_stop_19 = min(A_lvl_i_4, phase_stop_17) + if A_lvl_i_4 == phase_stop_19 + A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4] + Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2 + A_lvl_q_4 += 1 + end + break + end + end + end + A_lvl_q = A_lvl_q_step + B_lvl_q_2 += 1 + elseif B_lvl_i_2 == phase_stop_16 + 
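+ # note: the k loop above is a two-pointer merge over the sorted coordinate
+ # tables of A and B, with Finch.scansearch used to gallop past runs. A hedged,
+ # simplified sketch of the same intersection over two sorted index lists
+ # Ia/Va and Ib/Vb (names hypothetical):
+ #     while ka <= length(Ia) && kb <= length(Ib)
+ #         if Ia[ka] == Ib[kb]
+ #             acc += Va[ka] * Vb[kb]; ka += 1; kb += 1
+ #         elseif Ia[ka] < Ib[kb]
+ #             ka += 1
+ #         else
+ #             kb += 1
+ #         end
+ #     end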
B_lvl_q_2 += 1 + elseif A_lvl_i == phase_stop_16 + A_lvl_q = A_lvl_q_step + end + k = phase_stop_16 + 1 + end + end + B_lvl_q = B_lvl_q_step + end + break + end + end + end + end + end + end + resize!(val, A_lvl.shape[1] * B_lvl.shape[2]) + result = (Ct = Tensor((DenseLevel){Int64}((DenseLevel){Int64}(Ct_lvl_3, A_lvl.shape[1]), B_lvl.shape[2])),) + result +end +julia> @finch begin + Ct .= 0 + for i = parallel(_) + for j = _ + for k = _ + Ct[i, j] += A[i, k] * B[k, j] + end + end + end + end +(Ct = Tensor(Dense{Int64}(Dense{Int64}(Element{0.0, Float64, Int64}([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.70856859734722, 0.0, 0.0, 0.0, 0.0, 0.40683416426697433, 0.0, 0.0, 0.5116135014596547, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11665081191597346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6815260724911955, 0.0, 0.012792585308225086, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23414421666297028, 0.0, 0.049360715516608075, 0.0, 0.17114666211685992, 0.0, 0.0, 0.016462013473269064, 0.10889740119368015, 0.0, 0.0, 0.0, 0.00577445736315422, 0.03699291283586617, 0.0, 0.054643694670994745, 0.0, 0.0, 0.0, 0.08011353665096754, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1586425298490503, 0.5528778968703049, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22133004329744424, 0.198518145547811, 0.038821202773921835, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18278114382714625, 0.1962641247777959, 0.33892974018746314, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.013014677888550407, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6923776912647246, 0.0, 0.0, 0.0, 0.0, 0.03372992996786747, 0.0, 0.0, 0.0, 0.16613962476105457, 0.0, 0.21040813690603985, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12848588404681333, 0.44877210624067243, 0.0, 0.0, 0.00922143015221846, 0.0, 0.334367843457963, 0.0, 0.3344728737871945, 0.0, 0.007609184274463018, 0.0, 0.0783652603407444, 0.0, 0.008880000989208168, 0.6489339658812214, 0.0, 0.0, 0.4325454998052285, 0.24409841671258664, 0.0, 0.5857475361200408, 0.0, 0.006543890265716072, 0.0, 0.0, 0.0, 0.008425346810589148, 0.0, 0.0, 0.16622566484891482, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27228959440031625, 0.16863240701914226, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16867493346874554, 0.42330165278515836, 0.0, 0.0, 0.6457463733709866, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12409061480277174, 0.0, 0.14869580809977814, 0.0, 0.0, 0.0, 0.0, 0.14872661472502066, 0.5062551989716859, 0.06509123699478471, 0.0, 0.026886171078575067, 0.0, 0.3503937118835625, 0.0, 0.13397275360612415, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07224002362266076, 0.0, 0.0, 0.014356019852976252, 0.2658719803791635, 0.13166323290951196, 0.0, 0.0, 0.0, 0.0, 0.020308821426184575, 0.025635708510132263, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007980130035150811, 0.4090538905999397, 0.0, 0.0, 0.008918289569362796, 0.31893009912431536, 0.0, 0.0, 0.0, 0.009525001505142072, 0.0, 0.0, 0.013278407346240287, 0.021967736559129595, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12136922915980554, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.279484838006678, 0.2506790805406071, 0.0, 0.0, 0.10182245842840729, 0.0, 0.0, 0.5745756063332743, 0.0, 0.5180568530499269, 0.0, 0.19018853437509187, 0.2984452378932974, 0.0, 0.0, 0.0, 0.2447378349547936, 0.0, 0.48522550177156104, 0.5906428332019245, 0.0, 0.0, 0.756905839792281, 0.0, 0.4128008966782496, 0.7173389246941403, 0.0, 0.4558370427008323, 0.6596592042653375, 0.33014948101925834, 0.0, 
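+ # note: the Ct printed below matches the serial CR output above verbatim:
+ # parallelizing i does not reorder any accumulation, since each Ct[i, j] is
+ # summed by exactly one thread in the same k order as the serial loop, so the
+ # floating-point results are deterministic, not merely close.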
0.0, 0.0, 0.0, 0.17629318543732997, 0.5676926328184286, 0.6751757284159318, 0.18305092512120225, 0.0, 0.5526511646802545, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6816784313112039, 0.0, 0.0, 0.0, 0.0, 0.4384965381282249, 0.0, 0.5249476708016299, 0.5514304573066787, 0.0, 0.0, 0.0278361825874964, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.38619300372291254, 0.0, 0.5997636810868653, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20257599957103595, 0.0, 0.10844521538627999, 0.0, 1.2226089817686585, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04229463432907405, 0.0, 0.0, 0.0, 0.0, 0.19946965003853376, 0.048044013732540566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.036066754029278635, 0.0, 0.16231330637401103, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6062852141104628, 0.40481276616279827, 0.0, 0.0, 0.2539845497787725, 0.0, 0.0, 0.5691049256163461, 0.572636917198234, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.026986118300396385, 0.0, 0.0, 0.2578776387682179, 0.2201878278780288, 0.3521973225117564, 0.49204059164875263, 0.20695213801377368, 0.3889153033164169, 0.12024801465448598, 0.5015222202004627, 0.0, 0.0, 0.0, 0.25511838171251733, 0.3395573400007252, 0.0, 0.14076018335068016, 0.006079069440287111, 0.3120090695218348, 0.02735798901473654, 0.23564550369089346, 0.0, 0.5201790908279286, 0.42693711336544043, 0.0, 0.0, 0.32942094250755716, 0.0, 0.0, 0.0, 0.0, 0.42208957099917227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27253622974111436, 0.20091824181940837, 0.0, 0.0, 0.014451665708310772, 0.0648234167882922, 0.0, 0.0, 0.0, 0.0, 0.43998592755822313, 0.200499194569837, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10517105271620596, 0.4525079677294799, 0.0, 0.0, 0.0, 0.3992933674102322, 0.0, 0.6336315500314595, 0.5683248347075297, 0.0, 0.03911423543998137, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5618719435725092, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13717215169360888, 0.6023639264726295, 0.0, 0.16451246906039718, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2428824655720991, 0.0, 0.0, 0.0, 0.1307489669067023, 0.016490708613327746, 0.0, 0.5332965109179645, 0.0, 0.72835164827528, 0.5456771996801212, 0.20306669911566266, 0.8042852233793947, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08986996958215537, 0.0, 0.09372773893140769, 0.2910950907401526, 0.0, 0.0, 0.11400417325056827, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15073719147466486, 0.0, 0.0062702773417321524, 0.0, 0.11018067217476075, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03517847753788876, 0.0, 0.0, 0.0, 0.0, 0.0, 0.012565073058016527, 0.0, 0.013511533493035587, 0.0, 0.0, 0.0, 0.1160032767395455, 0.3288776375406247, 0.009384967607632352, 0.013704851364696426, 0.0, 0.0, 0.0, 0.10679904950368906, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5396299966121684, 0.0, 0.0, 0.0, 0.4120885163537877, 0.0, 0.0, 0.0, 0.0, 0.1374333138690627, 0.0, 0.0, 0.0, 0.0, 0.0, 0.308835769631579, 0.0, 0.0, 0.2935183046180839, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21641442424094412, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16667427577804808, 0.0, 0.0, 0.0, 0.0, 0.1072149118509185, 0.0, 0.0, 0.13482790109225204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32702264908263307, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17999984836884408, 0.0, 0.0, 0.0, 0.0, 0.17960575635997003, 0.0, 0.0, 0.0, 0.0, 0.08691531027429766, 0.0, 0.2893303365574358, 0.0, 0.21730094701941294, 0.07940443022930238, 0.0, 0.0, 0.01060094130869171, 0.0, 0.0, 0.21897146914670435, 0.3193413479843402, 0.11502306512986679, 0.16078084781731777, 0.0, 0.0, 0.029138065953053547, 0.12899773650646262, 0.0, 0.0, 0.0, 0.0, 0.35209003333845906, 0.0, 0.0, 0.42881832168904727, 0.0, 0.07551665376676242, 0.0, 0.10828996414360535, 0.0, 0.0, 0.0, 0.06175478190339224, 0.07317612880730584, 0.0, 0.4573326969055594, 0.13616590004764864, 0.1733951270114995, 0.0, 0.0, 0.0, 0.0, 0.004682414366264115, 0.0, 0.0, 0.014457116882680743, 0.477538324169661, 0.6690522150262886, 0.0, 0.0, 0.35074798851029604, 0.0, 0.6171788496488323, 0.0, 0.0, 0.0, 0.4598571719367441, 0.020424541646230095, 0.5014147387061915, 0.0, 0.0, 0.18767693158032267, 0.0, 0.08703683411330847, 0.0, 0.3388800152965881, 0.004390191394809013, 0.2131890539109787, 0.0, 0.43631258167790793, 0.0, 0.0, 0.0, 0.0, 0.011243656021000366, 0.0, 0.0, 0.004302223899340062, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3252052607129126, 0.10943362605720088, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4600533876598886, 0.0, 0.0, 0.0, 0.6238654988124779, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17327928789102656, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3007942289792172, 0.49763260068933673, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1811653464990684, 0.0, 0.7596663692737102, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24115083978451807, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4132016125575898, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.30212649550849624, 0.45064666528381636, 0.0, 0.0, 1.0295831107021076, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4924133218118872, 0.0, 0.0, 0.6123145833841932, 1.0074416855456372, 0.8974627446983252, 0.0, 0.0, 0.39387754214687604, 0.6443411524082342, 0.0, 0.6700619016074552, 0.0, 0.0, 0.4377096051189058, 0.3977735630344448, 0.0, 0.0, 0.0, 0.054395363429349475, 0.46912699721752377, 0.09054692381782561, 0.0, 0.6320539215802606, 0.33393006705956935, 0.614294006786966, 0.0, 0.0, 0.3986581318369477, 0.630705395059908, 0.0, 0.4266820903955834, 0.6230830913144384, 0.0, 1.0831760350408295, 0.21088758697054613, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7135848094649876, 0.06891111903866448, 0.0, 0.0, 0.0, 0.32920907856114856, 0.5438621275845595, 0.0, 0.4139962279186794, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13809169802867077, 0.0, 0.14849341459532064, 0.0, 0.0, 0.0, 0.5533499952652239, 0.0, 0.10314194807289108, 0.15061800177708015, 0.0, 0.5514890096422472, 0.23190068111070972, 0.0, 0.701822721136001, 0.6294877235260824, 0.0, 0.0, 0.0, 1.1059687974440209, 0.0, 0.0, 0.0, 0.96169924491107, 0.0, 0.0, 0.0, 0.0, 0.06628312350990566, 0.0, 0.37743722057381907, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08765206247905213, 0.0, 0.5218343219200703, 0.0, 0.0, 0.0, 0.41461579287108996, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6671900885778136, 0.0, 0.0, 0.0, 0.028262864260027967, 0.7909490968152504, 0.04511006432931489, 0.0, 0.0, 0.0, 0.4564818142302977, 0.35113439772465294, 0.0, 0.0, 0.0, 0.03776705986522304, 0.6129906961879193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42034814190539166, 0.0, 0.0, 0.21638060933821013, 0.0, 0.0, 0.0, 0.2526638526129318, 0.03715346614099295, 0.6358424325058142, 0.0, 0.0, 0.04152129953915335, 0.04614236094065058, 0.0, 0.0, 0.07015688762319212, 0.05740172594394938, 0.18351207696535793, 0.0, 0.0, 0.008864400691370657, 0.0, 0.8651249823989057, 0.407678395491645, 
0.845679782901505, 0.0, 0.0, 0.0, 0.0, 0.4753420456230014, 1.6107190368773618, 0.0, 0.0, 0.0638102435424578, 0.24041718723542044, 0.5177820553003522, 0.44619181405158337, 0.4489609795607919, 0.0, 0.07301898938869086, 0.0902695074635508, 0.4268753686592877, 0.0, 0.0, 0.0, 0.16408594940133228, 0.0, 0.0, 0.6736439849461895, 0.031382854877976686, 0.034000045192845295, 0.0, 0.0, 0.8630676095443404, 0.3932053636261741, 0.0, 0.0, 0.0, 0.23747714078691665, 0.2179752640521007, 0.0, 0.059020425946533776, 0.23507519209688904, 0.0, 0.6184935143504988, 0.18475168643298573, 0.3255150538360457, 0.0, 0.0, 0.10559996237493433, 0.258855010743052, 0.0, 0.0, 0.0, 0.0, 0.5931453366617909, 0.0, 0.012879882724963071, 0.34456425817306313, 0.0, 0.10363371305008902, 0.0, 0.0, 0.13032437159832005, 0.0, 0.0, 0.9136517678658531, 0.11645026631438779, 0.0, 0.0, 1.006195673838974, 0.0, 0.49183662858528426, 0.5242175629103283, 0.0, 0.0, 0.5842253265103485, 0.09175087532086124, 0.0, 0.0, 0.6346229344595797, 0.8483440750859271, 0.0, 0.0, 0.43168828228976236, 1.1746392457903614, 0.9786548179849323, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22466432888463708, 0.0, 0.0, 0.23383689086971213, 0.07868759200794351, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5047067971651431, 0.0, 0.04466167239654772, 0.0, 0.18031282035336027, 0.06880056169195574, 0.0, 0.039760598424459205, 0.2052193360923158, 0.0, 0.1245954319549744, 0.0, 0.0, 0.3371869924989234, 0.10076818165067654, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21628428501390357, 0.41878760793751535, 0.0, 0.2743772579349106, 0.0, 0.0, 0.0, 0.23737336558608516, 0.0, 0.5609891075534331, 0.0, 0.0, 0.0, 0.0, 0.11206304892649907, 0.03770992438488979, 0.0, 0.0, 0.17383517173220803, 0.252655932334268, 0.0, 0.1585306005724301, 0.0, 0.1865298137691172, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05971061253533409, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15913149160221682, 0.0, 0.4030910369398146, 0.0, 0.32022434202130456, 0.23687245172165972, 0.0, 0.3599325265793679, 0.0, 0.4765964362382492, 0.0, 0.0, 0.0, 0.0, 0.6107120779202261, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.573260313114729, 0.5768180939544553, 0.0, 0.17499066339826644, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7201822598869417, 0.0, 0.0, 0.0, 0.0, 0.08725982492406978, 0.5051841445138487, 0.0, 0.0, 0.0, 0.2569811590076317, 0.3964028116744446, 0.0, 0.0, 0.0, 0.0, 0.0, 0.31198143094865527, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42812611727229755, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.40187139064927635, 0.40436549376609193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5533450612442088, 0.3541481069684905, 0.0, 0.0, 0.0, 0.1801509251180122, 0.0, 0.0, 0.0, 0.08899148634361333, 0.0, 0.4004935508153472, 0.1664002225353232, 0.6660561411338942, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5274372110974553, 0.0, 0.0, 0.11745018036413006, 0.03952272816910389, 0.654574176319042, 0.0, 0.0, 0.0, 0.0, 0.29642163335908794, 0.1274499525156547, 0.03345490004078419, 0.0, 0.0, 0.07653915037041417, 0.0, 0.02978363268713748, 0.15372448023095572, 0.03967384765850083, 0.06258104057589436, 0.0, 0.0, 0.044337981986606795, 0.07548283043416787, 0.0, 0.0, 0.0, 0.04735429836324166, 0.0, 0.0, 0.10863396357319674, 0.1797235339241024, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2053113311622715, 0.0, 0.0, 0.6339058613302375, 0.26881844559242324, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.047384735026192754, 0.0, 0.09394657688969027, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.41199723535819494, 0.0, 0.0, 0.19249813639294353, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4930042077083581, 0.01569004651529754, 0.0, 0.1886409972802957, 0.0, 
0.6504613027483894, 0.0, 0.0, 0.0, 0.4491308783325726, 0.0, 0.710600063326777, … (output truncated; each schedule below produces this same 42×42 result) …]), 42), 42)),)
+julia> @finch_code begin
+           Ct .= 0
+           for i = _
+               for j = parallel(_)
+                   for k = _
+                       Ct[i, j] += A[i, k] * B[k, j]
+                   end
+               end
+           end
+       end
+quote
+    Ct_lvl = ((ex.bodies[1]).bodies[1]).tns.bind.lvl
+    Ct_lvl_2 = Ct_lvl.lvl
+    Ct_lvl_3 = Ct_lvl_2.lvl
+    Ct_lvl_2_val = Ct_lvl_2.lvl.val
+    A_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl
+    A_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.ptr
+    A_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[1]
+    A_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[2]
+    A_lvl_val = A_lvl.lvl.val
+    B_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl
+    B_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.ptr
+    B_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[1]
+    B_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[2]
+    B_lvl_val = B_lvl.lvl.val
+    B_lvl.shape[1] == A_lvl.shape[2] || throw(DimensionMismatch("mismatched dimension limits ($(B_lvl.shape[1]) != $(A_lvl.shape[2]))"))
+    @warn "Performance Warning: non-concordant traversal of Ct[i, j] (hint: most arrays prefer column major or first index fast, run in fast mode to ignore this warning)"
+    @warn "Performance Warning: non-concordant traversal of A[i, k] (hint: most arrays prefer column major or first index fast, run in fast mode to ignore this warning)"
+    result = nothing
+    pos_stop = A_lvl.shape[1] * B_lvl.shape[2]
+    Finch.resize_if_smaller!(Ct_lvl_2_val, pos_stop)
+    Finch.fill_range!(Ct_lvl_2_val, 0.0, 1, pos_stop)
+    for i_4 = 1:A_lvl.shape[1]
+        val = Ct_lvl_2_val
+        Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads()))
+        B_lvl_ptr_2 = B_lvl_ptr
+        B_lvl_ptr = (Finch).moveto(B_lvl_ptr, CPU(Threads.nthreads()))
+        B_lvl_tbl1_2 = B_lvl_tbl1
+        B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads()))
+        B_lvl_tbl2_2 = B_lvl_tbl2
+        B_lvl_tbl2 = (Finch).moveto(B_lvl_tbl2, CPU(Threads.nthreads()))
+        val_2 = B_lvl_val
+        B_lvl_val = (Finch).moveto(B_lvl_val, CPU(Threads.nthreads()))
+        A_lvl_ptr_2 = A_lvl_ptr
+        A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads()))
+        A_lvl_tbl1_2 = A_lvl_tbl1
+        A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads()))
+        A_lvl_tbl2_2 = A_lvl_tbl2
+        A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads()))
+        val_3 = A_lvl_val
+        A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads()))
+        Threads.@threads for i_5 = 1:Threads.nthreads()
+            B_lvl_q = B_lvl_ptr[1]
+            B_lvl_q_stop = B_lvl_ptr[1 + 1]
+            if B_lvl_q < B_lvl_q_stop
+                B_lvl_i_stop = B_lvl_tbl2[B_lvl_q_stop - 1]
+            else
+                B_lvl_i_stop = 0
+            end
+            phase_start_2 = max(1, 1 + fld(B_lvl.shape[2] * (i_5 + -1), Threads.nthreads()))
+            phase_stop_2 = min(B_lvl.shape[2], B_lvl_i_stop, fld(B_lvl.shape[2] * i_5, Threads.nthreads()))
+            if phase_stop_2 >= phase_start_2
+                if B_lvl_tbl2[B_lvl_q] < phase_start_2
+                    B_lvl_q = Finch.scansearch(B_lvl_tbl2, phase_start_2, B_lvl_q, B_lvl_q_stop - 1)
+                end
+                while true
+                    B_lvl_i = B_lvl_tbl2[B_lvl_q]
+                    B_lvl_q_step = B_lvl_q
+                    if B_lvl_tbl2[B_lvl_q] == B_lvl_i
+                        B_lvl_q_step = Finch.scansearch(B_lvl_tbl2, B_lvl_i + 1, B_lvl_q, B_lvl_q_stop - 1)
+                    end
+                    if B_lvl_i < phase_stop_2
+                        Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + B_lvl_i
+                        Ct_lvl_2_q = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_4
+                        A_lvl_q = A_lvl_ptr[1]
+                        A_lvl_q_stop = A_lvl_ptr[1 + 1]
+                        if A_lvl_q < A_lvl_q_stop
+                            A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1]
+                        else
+                            A_lvl_i_stop = 0
+                        end
+                        B_lvl_q_3 = B_lvl_q
+                        if B_lvl_q < B_lvl_q_step
+                            B_lvl_i_stop_3 = B_lvl_tbl1[B_lvl_q_step - 1]
+                        else
+                            B_lvl_i_stop_3 = 0
+                        end
+                        phase_stop_4 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_3)
+                        if phase_stop_4 >= 1
+                            k = 1
+                            if A_lvl_tbl2[A_lvl_q] < 1
+                                A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1)
+                            end
+                            if B_lvl_tbl1[B_lvl_q] < 1
+                                B_lvl_q_3 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1)
+                            end
+                            while k <= phase_stop_4
+                                A_lvl_i = A_lvl_tbl2[A_lvl_q]
+                                A_lvl_q_step = A_lvl_q
+                                if A_lvl_tbl2[A_lvl_q] == A_lvl_i
+                                    A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1)
+                                end
+                                B_lvl_i_3 = B_lvl_tbl1[B_lvl_q_3]
+                                phase_stop_5 = min(B_lvl_i_3, phase_stop_4, A_lvl_i)
+                                if A_lvl_i == phase_stop_5 && B_lvl_i_3 == phase_stop_5
+                                    B_lvl_2_val = B_lvl_val[B_lvl_q_3]
+                                    A_lvl_q_2 = A_lvl_q
+                                    if A_lvl_q < A_lvl_q_step
+                                        A_lvl_i_stop_2 = A_lvl_tbl1[A_lvl_q_step - 1]
+                                    else
+                                        A_lvl_i_stop_2 = 0
+                                    end
+                                    phase_stop_6 = min(i_4, A_lvl_i_stop_2)
+                                    if phase_stop_6 >= i_4
+                                        if A_lvl_tbl1[A_lvl_q] < i_4
+                                            A_lvl_q_2 = Finch.scansearch(A_lvl_tbl1, i_4, A_lvl_q, A_lvl_q_step - 1)
+                                        end
+                                        while true
+                                            A_lvl_i_2 = A_lvl_tbl1[A_lvl_q_2]
+                                            if A_lvl_i_2 < phase_stop_6
+                                                A_lvl_2_val = A_lvl_val[A_lvl_q_2]
+                                                Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val
+                                                A_lvl_q_2 += 1
+                                            else
+                                                phase_stop_8 = min(A_lvl_i_2, phase_stop_6)
+                                                if A_lvl_i_2 == phase_stop_8
+                                                    A_lvl_2_val = A_lvl_val[A_lvl_q_2]
+                                                    Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val
+                                                    A_lvl_q_2 += 1
+                                                end
+                                                break
+                                            end
+                                        end
+                                    end
+                                    A_lvl_q = A_lvl_q_step
+                                    B_lvl_q_3 += 1
+                                elseif B_lvl_i_3 == phase_stop_5
+                                    B_lvl_q_3 += 1
+                                elseif A_lvl_i == phase_stop_5
+                                    A_lvl_q = A_lvl_q_step
+                                end
+                                k = phase_stop_5 + 1
+                            end
+                        end
+                        B_lvl_q = B_lvl_q_step
+                    else
+                        phase_stop_13 = min(B_lvl_i, phase_stop_2)
+                        if B_lvl_i == phase_stop_13
+                            Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + phase_stop_13
+                            Ct_lvl_2_q_2 = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_4
+                            A_lvl_q = A_lvl_ptr[1]
+                            A_lvl_q_stop = A_lvl_ptr[1 + 1]
+                            if A_lvl_q < A_lvl_q_stop
+                                A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1]
+                            else
+                                A_lvl_i_stop = 0
+                            end
+                            B_lvl_q_3 = B_lvl_q
+                            if B_lvl_q < B_lvl_q_step
+                                B_lvl_i_stop_3 = B_lvl_tbl1[B_lvl_q_step - 1]
+                            else
+                                B_lvl_i_stop_3 = 0
+                            end
+                            phase_stop_14 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_3)
+                            if phase_stop_14 >= 1
+                                k = 1
+                                if A_lvl_tbl2[A_lvl_q] < 1
+                                    A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1)
+                                end
+                                if B_lvl_tbl1[B_lvl_q] < 1
+                                    B_lvl_q_3 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1)
+                                end
+                                while k <= phase_stop_14
+                                    A_lvl_i = A_lvl_tbl2[A_lvl_q]
+                                    A_lvl_q_step = A_lvl_q
+                                    if A_lvl_tbl2[A_lvl_q] == A_lvl_i
+                                        A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1)
+                                    end
+                                    B_lvl_i_3 = B_lvl_tbl1[B_lvl_q_3]
+                                    phase_stop_15 = min(B_lvl_i_3, A_lvl_i, phase_stop_14)
+                                    if A_lvl_i == phase_stop_15 && B_lvl_i_3 == phase_stop_15
+                                        B_lvl_2_val_3 = B_lvl_val[B_lvl_q_3]
+                                        A_lvl_q_4 = A_lvl_q
+                                        if A_lvl_q < A_lvl_q_step
+                                            A_lvl_i_stop_4 = A_lvl_tbl1[A_lvl_q_step - 1]
+                                        else
+                                            A_lvl_i_stop_4 = 0
+                                        end
+                                        phase_stop_16 = min(i_4, A_lvl_i_stop_4)
+                                        if phase_stop_16 >= i_4
+                                            if A_lvl_tbl1[A_lvl_q] < i_4
+                                                A_lvl_q_4 = Finch.scansearch(A_lvl_tbl1, i_4, A_lvl_q, A_lvl_q_step - 1)
+                                            end
+                                            while true
+                                                A_lvl_i_4 = A_lvl_tbl1[A_lvl_q_4]
+                                                if A_lvl_i_4 < phase_stop_16
+                                                    A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4]
+                                                    Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2
+                                                    A_lvl_q_4 += 1
+                                                else
+                                                    phase_stop_18 = min(A_lvl_i_4, phase_stop_16)
+                                                    if A_lvl_i_4 == phase_stop_18
+                                                        A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4]
+                                                        Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2
+                                                        A_lvl_q_4 += 1
+                                                    end
+                                                    break
+                                                end
+                                            end
+                                        end
+                                        A_lvl_q = A_lvl_q_step
+                                        B_lvl_q_3 += 1
+                                    elseif B_lvl_i_3 == phase_stop_15
+                                        B_lvl_q_3 += 1
+                                    elseif A_lvl_i == phase_stop_15
+                                        A_lvl_q = A_lvl_q_step
+                                    end
+                                    k = phase_stop_15 + 1
+                                end
+                            end
+                            B_lvl_q = B_lvl_q_step
+                        end
+                        break
+                    end
+                end
+            end
+        end
+        Ct_lvl_2_val = val
+        B_lvl_ptr = B_lvl_ptr_2
+        B_lvl_tbl1 = B_lvl_tbl1_2
+        B_lvl_tbl2 = B_lvl_tbl2_2
+        B_lvl_val = val_2
+        A_lvl_ptr = A_lvl_ptr_2
+        A_lvl_tbl1 = A_lvl_tbl1_2
+        A_lvl_tbl2 = A_lvl_tbl2_2
+        A_lvl_val = val_3
+    end
+    resize!(Ct_lvl_2_val, A_lvl.shape[1] * B_lvl.shape[2])
+    result = (Ct = Tensor((DenseLevel){Int64}((DenseLevel){Int64}(Ct_lvl_3, A_lvl.shape[1]), B_lvl.shape[2])),)
+    result
+end
+julia> @finch begin
+           Ct .= 0
+           for i = _
+               for j = parallel(_)
+                   for k = _
+                       Ct[i, j] += A[i, k] * B[k, j]
+                   end
+               end
+           end
+       end
+(Ct = Tensor(Dense{Int64}(Dense{Int64}(Element{0.0, Float64, Int64}([… same values as the result above …]), 42), 42)),)
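The kernel above carves the parallel `j` range into contiguous chunks with the `fld`-based arithmetic in `phase_start_2`/`phase_stop_2`: thread `t` of `T` owns columns `fld(n*(t-1), T)+1 : fld(n*t, T)`, so the chunks tile `1:n` exactly and no two threads ever write the same column of `Ct`. A standalone sketch of that partitioning scheme (the helper name `owned_range` and the column-sum kernel are illustrative stand-ins, not Finch API):

    using Base.Threads

    # Static partition mirroring the generated kernel's bounds: thread t of T
    # owns fld(n*(t-1), T)+1 : fld(n*t, T), and these ranges tile 1:n exactly.
    owned_range(n, t, T) = (fld(n * (t - 1), T) + 1):fld(n * t, T)

    function threaded_column_sums(M::Matrix{Float64})
        m, n = size(M)
        sums = zeros(n)
        @threads for t in 1:nthreads()
            for j in owned_range(n, t, nthreads())  # disjoint column chunks
                for i in 1:m
                    sums[j] += M[i, j]              # only thread t touches sums[j]
                end
            end
        end
        return sums
    end

Because the chunks are disjoint, each column of the output has exactly one writer, so the update needs no atomics or locks.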
+julia> @finch_code begin
+           Ct .= 0
+           for j = parallel(_)
+               for i = _
+                   for k = _
+                       Ct[i, j] += A[i, k] * B[k, j]
+                   end
+               end
+           end
+       end
+quote
+    Ct_lvl = ((ex.bodies[1]).bodies[1]).tns.bind.lvl
+    Ct_lvl_2 = Ct_lvl.lvl
+    Ct_lvl_3 = Ct_lvl_2.lvl
+    Ct_lvl_2_val = Ct_lvl_2.lvl.val
+    A_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl
+    A_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.ptr
+    A_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[1]
+    A_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[2]
+    A_lvl_val = A_lvl.lvl.val
+    B_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl
+    B_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.ptr
+    B_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[1]
+    B_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[2]
+    B_lvl_val = B_lvl.lvl.val
+    B_lvl.shape[1] == A_lvl.shape[2] || throw(DimensionMismatch("mismatched dimension limits ($(B_lvl.shape[1]) != $(A_lvl.shape[2]))"))
+    @warn "Performance Warning: non-concordant traversal of A[i, k] (hint: most arrays prefer column major or first index fast, run in fast mode to ignore this warning)"
+    result = nothing
+    pos_stop = A_lvl.shape[1] * B_lvl.shape[2]
+    Finch.resize_if_smaller!(Ct_lvl_2_val, pos_stop)
+    Finch.fill_range!(Ct_lvl_2_val, 0.0, 1, pos_stop)
+    val = Ct_lvl_2_val
+    Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads()))
+    B_lvl_ptr = (Finch).moveto(B_lvl_ptr, CPU(Threads.nthreads()))
+    B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads()))
+    B_lvl_tbl2 = (Finch).moveto(B_lvl_tbl2, CPU(Threads.nthreads()))
+    B_lvl_val = (Finch).moveto(B_lvl_val, CPU(Threads.nthreads()))
+    A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads()))
+    A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads()))
+    A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads()))
+    A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads()))
+    Threads.@threads for i_4 = 1:Threads.nthreads()
+        B_lvl_q = B_lvl_ptr[1]
+        B_lvl_q_stop = B_lvl_ptr[1 + 1]
+        if B_lvl_q < B_lvl_q_stop
+            B_lvl_i_stop = B_lvl_tbl2[B_lvl_q_stop - 1]
+        else
+            B_lvl_i_stop = 0
+        end
+        phase_start_2 = max(1, 1 + fld(B_lvl.shape[2] * (i_4 + -1), Threads.nthreads()))
+        phase_stop_2 = min(B_lvl.shape[2], B_lvl_i_stop, fld(B_lvl.shape[2] * i_4, Threads.nthreads()))
+        if phase_stop_2 >= phase_start_2
+            if B_lvl_tbl2[B_lvl_q] < phase_start_2
+                B_lvl_q = Finch.scansearch(B_lvl_tbl2, phase_start_2, B_lvl_q, B_lvl_q_stop - 1)
+            end
+            while true
+                B_lvl_i = B_lvl_tbl2[B_lvl_q]
+                B_lvl_q_step = B_lvl_q
+                if B_lvl_tbl2[B_lvl_q] == B_lvl_i
+                    B_lvl_q_step = Finch.scansearch(B_lvl_tbl2, B_lvl_i + 1, B_lvl_q, B_lvl_q_stop - 1)
+                end
+                if B_lvl_i < phase_stop_2
+                    Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + B_lvl_i
+                    for i_6 = 1:A_lvl.shape[1]
+                        Ct_lvl_2_q = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_6
+                        A_lvl_q = A_lvl_ptr[1]
+                        A_lvl_q_stop = A_lvl_ptr[1 + 1]
+                        if A_lvl_q < A_lvl_q_stop
+                            A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1]
+                        else
+                            A_lvl_i_stop = 0
+                        end
+                        B_lvl_q_3 = B_lvl_q
+                        if B_lvl_q < B_lvl_q_step
+                            B_lvl_i_stop_3 = B_lvl_tbl1[B_lvl_q_step - 1]
+                        else
+                            B_lvl_i_stop_3 = 0
+                        end
+                        phase_stop_4 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_3)
+                        if phase_stop_4 >= 1
+                            k = 1
+                            if A_lvl_tbl2[A_lvl_q] < 1
+                                A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1)
+                            end
+                            if B_lvl_tbl1[B_lvl_q] < 1
+                                B_lvl_q_3 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1)
+                            end
+                            while k <= phase_stop_4
+                                A_lvl_i = A_lvl_tbl2[A_lvl_q]
+                                A_lvl_q_step = A_lvl_q
+                                if A_lvl_tbl2[A_lvl_q] == A_lvl_i
+                                    A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1)
+                                end
+                                B_lvl_i_3 = B_lvl_tbl1[B_lvl_q_3]
+                                phase_stop_5 = min(B_lvl_i_3, phase_stop_4, A_lvl_i)
+                                if A_lvl_i == phase_stop_5 && B_lvl_i_3 == phase_stop_5
+                                    B_lvl_2_val = B_lvl_val[B_lvl_q_3]
+                                    A_lvl_q_2 = A_lvl_q
+                                    if A_lvl_q < A_lvl_q_step
+                                        A_lvl_i_stop_2 = A_lvl_tbl1[A_lvl_q_step - 1]
+                                    else
+                                        A_lvl_i_stop_2 = 0
+                                    end
+                                    phase_stop_6 = min(i_6, A_lvl_i_stop_2)
+                                    if phase_stop_6 >= i_6
+                                        if A_lvl_tbl1[A_lvl_q] < i_6
+                                            A_lvl_q_2 = Finch.scansearch(A_lvl_tbl1, i_6, A_lvl_q, A_lvl_q_step - 1)
+                                        end
+                                        while true
+                                            A_lvl_i_2 = A_lvl_tbl1[A_lvl_q_2]
+                                            if A_lvl_i_2 < phase_stop_6
+                                                A_lvl_2_val = A_lvl_val[A_lvl_q_2]
+                                                Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val
+                                                A_lvl_q_2 += 1
+                                            else
+                                                phase_stop_8 = min(A_lvl_i_2, phase_stop_6)
+                                                if A_lvl_i_2 == phase_stop_8
+                                                    A_lvl_2_val = A_lvl_val[A_lvl_q_2]
+                                                    Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val
+                                                    A_lvl_q_2 += 1
+                                                end
+                                                break
+                                            end
+                                        end
+                                    end
+                                    A_lvl_q = A_lvl_q_step
+                                    B_lvl_q_3 += 1
+                                elseif B_lvl_i_3 == phase_stop_5
+                                    B_lvl_q_3 += 1
+                                elseif A_lvl_i == phase_stop_5
+                                    A_lvl_q = A_lvl_q_step
+                                end
+                                k = phase_stop_5 + 1
+                            end
+                        end
+                    end
+                    B_lvl_q = B_lvl_q_step
+                else
+                    phase_stop_13 = min(B_lvl_i, phase_stop_2)
+                    if B_lvl_i == phase_stop_13
+                        Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + phase_stop_13
+                        for i_8 = 1:A_lvl.shape[1]
+                            Ct_lvl_2_q_2 = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_8
+                            A_lvl_q = A_lvl_ptr[1]
+                            A_lvl_q_stop = A_lvl_ptr[1 + 1]
+                            if A_lvl_q < A_lvl_q_stop
+                                A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1]
+                            else
+                                A_lvl_i_stop = 0
+                            end
+                            B_lvl_q_3 = B_lvl_q
+                            if B_lvl_q < B_lvl_q_step
+                                B_lvl_i_stop_3 = B_lvl_tbl1[B_lvl_q_step - 1]
+                            else
+                                B_lvl_i_stop_3 = 0
+                            end
+                            phase_stop_14 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_3)
+                            if phase_stop_14 >= 1
+                                k = 1
+                                if A_lvl_tbl2[A_lvl_q] < 1
+                                    A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1)
+                                end
+                                if B_lvl_tbl1[B_lvl_q] < 1
+                                    B_lvl_q_3 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1)
+                                end
+                                while k <= phase_stop_14
+                                    A_lvl_i = A_lvl_tbl2[A_lvl_q]
+                                    A_lvl_q_step = A_lvl_q
+                                    if A_lvl_tbl2[A_lvl_q] == A_lvl_i
+                                        A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1)
+                                    end
+                                    B_lvl_i_3 = B_lvl_tbl1[B_lvl_q_3]
+                                    phase_stop_15 = min(B_lvl_i_3, A_lvl_i, phase_stop_14)
+                                    if A_lvl_i == phase_stop_15 && B_lvl_i_3 == phase_stop_15
+                                        B_lvl_2_val_3 = B_lvl_val[B_lvl_q_3]
+                                        A_lvl_q_4 = A_lvl_q
+                                        if A_lvl_q < A_lvl_q_step
+                                            A_lvl_i_stop_4 = A_lvl_tbl1[A_lvl_q_step - 1]
+                                        else
+                                            A_lvl_i_stop_4 = 0
+                                        end
+                                        phase_stop_16 = min(i_8, A_lvl_i_stop_4)
+                                        if phase_stop_16 >= i_8
+                                            if A_lvl_tbl1[A_lvl_q] < i_8
+                                                A_lvl_q_4 = Finch.scansearch(A_lvl_tbl1, i_8, A_lvl_q, A_lvl_q_step - 1)
+                                            end
+                                            while true
+                                                A_lvl_i_4 = A_lvl_tbl1[A_lvl_q_4]
+                                                if A_lvl_i_4 < phase_stop_16
+                                                    A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4]
+                                                    Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2
+                                                    A_lvl_q_4 += 1
+                                                else
+                                                    phase_stop_18 = min(A_lvl_i_4, phase_stop_16)
+                                                    if A_lvl_i_4 == phase_stop_18
+                                                        A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4]
+                                                        Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2
+                                                        A_lvl_q_4 += 1
+                                                    end
+                                                    break
+                                                end
+                                            end
+                                        end
+                                        A_lvl_q = A_lvl_q_step
+                                        B_lvl_q_3 += 1
+                                    elseif B_lvl_i_3 == phase_stop_15
+                                        B_lvl_q_3 += 1
+                                    elseif A_lvl_i == phase_stop_15
+                                        A_lvl_q = A_lvl_q_step
+                                    end
+                                    k = phase_stop_15 + 1
+                                end
+                            end
+                        end
+                        B_lvl_q = B_lvl_q_step
+                    end
+                    break
+                end
+            end
+        end
+    end
+    resize!(val, A_lvl.shape[1] * B_lvl.shape[2])
+    result = (Ct = Tensor((DenseLevel){Int64}((DenseLevel){Int64}(Ct_lvl_3, A_lvl.shape[1]), B_lvl.shape[2])),)
+    result
+end
+julia> @finch begin
+           Ct .= 0
+           for j = parallel(_)
+               for i = _
+                   for k = _
+                       Ct[i, j] += A[i, k] * B[k, j]
+                   end
+               end
+           end
+       end
+(Ct = Tensor(Dense{Int64}(Dense{Int64}(Element{0.0, Float64, Int64}([… same values as the result above …]), 42), 42)),)
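Note where the `(Finch).moveto` calls land in the two schedules so far: with `for i = _` outermost they sit inside the serial `i` loop and re-stage every buffer on each iteration, while with `for j = parallel(_)` outermost they are hoisted above `Threads.@threads` and run once. A minimal sketch of that difference, where `stage` is a hypothetical stand-in for `Finch.moveto` (a per-device buffer handoff), not the real call:

    using Base.Threads

    stage(x) = copy(x)  # stand-in for staging a buffer before a parallel region

    # Inner-parallel shape: staging repeats on every outer iteration.
    function inner_parallel(xs, n)
        acc = zeros(nthreads())          # xs must have length >= nthreads()
        for i in 1:n
            ys = stage(xs)               # O(n) staging calls in total
            @threads for t in 1:nthreads()
                acc[t] += ys[t]
            end
        end
        return acc
    end

    # Outer-parallel shape: staging is hoisted and runs once.
    function outer_parallel(xs, n)
        acc = zeros(nthreads())
        ys = stage(xs)                   # O(1) staging calls in total
        @threads for t in 1:nthreads()
            for i in 1:n
                acc[t] += ys[t]
            end
        end
        return acc
    end

All else being equal, the hoisted form does constant staging work instead of work proportional to the outer trip count, which is why parallelizing the outermost loop tends to be preferable when the schedule allows it.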
+julia> @finch_code begin
+           Ct .= 0
+           for j = _
+               for i = parallel(_)
+                   for k = _
+                       Ct[i, j] += A[i, k] * B[k, j]
+                   end
+               end
+           end
+       end
+quote
+    Ct_lvl = ((ex.bodies[1]).bodies[1]).tns.bind.lvl
+    Ct_lvl_2 = Ct_lvl.lvl
+    Ct_lvl_3 = Ct_lvl_2.lvl
+    Ct_lvl_2_val = Ct_lvl_2.lvl.val
+    A_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl
+    A_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.ptr
+    A_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[1]
+    A_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[2]
+    A_lvl_val = A_lvl.lvl.val
+    B_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl
+    B_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.ptr
+    B_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[1]
+    B_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[2]
+    B_lvl_val = B_lvl.lvl.val
+    B_lvl.shape[1] == A_lvl.shape[2] || throw(DimensionMismatch("mismatched dimension limits ($(B_lvl.shape[1]) != $(A_lvl.shape[2]))"))
+    @warn "Performance Warning: non-concordant traversal of A[i, k] (hint: most arrays prefer column major or first index fast, run in fast mode to ignore this warning)"
+    result = nothing
+    pos_stop = A_lvl.shape[1] * B_lvl.shape[2]
+    Finch.resize_if_smaller!(Ct_lvl_2_val, pos_stop)
+    Finch.fill_range!(Ct_lvl_2_val, 0.0, 1, pos_stop)
+    B_lvl_q = B_lvl_ptr[1]
+    B_lvl_q_stop = B_lvl_ptr[1 + 1]
+    if B_lvl_q < B_lvl_q_stop
+        B_lvl_i_stop = B_lvl_tbl2[B_lvl_q_stop - 1]
+    else
+        B_lvl_i_stop = 0
+    end
+    phase_stop = min(B_lvl.shape[2], B_lvl_i_stop)
+    if phase_stop >= 1
+        if B_lvl_tbl2[B_lvl_q] < 1
+            B_lvl_q = Finch.scansearch(B_lvl_tbl2, 1, B_lvl_q, B_lvl_q_stop - 1)
+        end
+        while true
+            B_lvl_i = B_lvl_tbl2[B_lvl_q]
+            B_lvl_q_step = B_lvl_q
+            if B_lvl_tbl2[B_lvl_q] == B_lvl_i
+                B_lvl_q_step = Finch.scansearch(B_lvl_tbl2, B_lvl_i + 1, B_lvl_q, B_lvl_q_stop - 1)
+            end
+            if B_lvl_i < phase_stop
+                Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + B_lvl_i
+                val = Ct_lvl_2_val
+                Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads()))
+                B_lvl_tbl1_2 = B_lvl_tbl1
+                B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads()))
+                B_lvl_tbl2_2 = B_lvl_tbl2
+                val_2 = B_lvl_val
+                B_lvl_val = (Finch).moveto(B_lvl_val, CPU(Threads.nthreads()))
+                A_lvl_ptr_2 = A_lvl_ptr
+                A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads()))
+                A_lvl_tbl1_2 = A_lvl_tbl1
+                A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads()))
+                A_lvl_tbl2_2 = A_lvl_tbl2
+                A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads()))
+                val_3 = A_lvl_val
+                A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads()))
+                Threads.@threads for i_9 = 1:Threads.nthreads()
+                    phase_start_6 = max(1, 1 + fld(A_lvl.shape[1] * (-1 + i_9), Threads.nthreads()))
+                    phase_stop_7 = min(A_lvl.shape[1], fld(A_lvl.shape[1] * i_9, Threads.nthreads()))
+                    if phase_stop_7 >= phase_start_6
+                        for i_12 = phase_start_6:phase_stop_7
+                            Ct_lvl_2_q = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_12
+                            A_lvl_q = A_lvl_ptr[1]
+                            A_lvl_q_stop = A_lvl_ptr[1 + 1]
+                            if A_lvl_q < A_lvl_q_stop
+                                A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1]
+                            else
+                                A_lvl_i_stop = 0
+                            end
+                            B_lvl_q_2 = B_lvl_q
+                            if B_lvl_q < B_lvl_q_step
+                                B_lvl_i_stop_2 = B_lvl_tbl1[B_lvl_q_step - 1]
+                            else
+                                B_lvl_i_stop_2 = 0
+                            end
+                            phase_stop_8 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_2)
+                            if phase_stop_8 >= 1
+                                k = 1
+                                if A_lvl_tbl2[A_lvl_q] < 1
+                                    A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1)
+                                end
+                                if B_lvl_tbl1[B_lvl_q] < 1
+                                    B_lvl_q_2 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1)
+                                end
+                                while k <= phase_stop_8
+                                    A_lvl_i = A_lvl_tbl2[A_lvl_q]
+                                    A_lvl_q_step = A_lvl_q
+                                    if A_lvl_tbl2[A_lvl_q] == A_lvl_i
+                                        A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1)
+                                    end
+                                    B_lvl_i_2 = B_lvl_tbl1[B_lvl_q_2]
+                                    phase_stop_9 = min(B_lvl_i_2, phase_stop_8, A_lvl_i)
+                                    if A_lvl_i == phase_stop_9 && B_lvl_i_2 == phase_stop_9
+                                        B_lvl_2_val = B_lvl_val[B_lvl_q_2]
+                                        A_lvl_q_2 = A_lvl_q
+                                        if A_lvl_q < A_lvl_q_step
+                                            A_lvl_i_stop_2 = A_lvl_tbl1[A_lvl_q_step - 1]
+                                        else
+                                            A_lvl_i_stop_2 = 0
+                                        end
+                                        phase_stop_10 = min(i_12, A_lvl_i_stop_2)
+                                        if phase_stop_10 >= i_12
+                                            if A_lvl_tbl1[A_lvl_q] < i_12
+                                                A_lvl_q_2 = Finch.scansearch(A_lvl_tbl1, i_12, A_lvl_q, A_lvl_q_step - 1)
+                                            end
+                                            while true
+                                                A_lvl_i_2 = A_lvl_tbl1[A_lvl_q_2]
+                                                if A_lvl_i_2 < phase_stop_10
+                                                    A_lvl_2_val = A_lvl_val[A_lvl_q_2]
+                                                    Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val
+                                                    A_lvl_q_2 += 1
+                                                else
+                                                    phase_stop_12 = min(A_lvl_i_2, phase_stop_10)
+                                                    if A_lvl_i_2 == phase_stop_12
+                                                        A_lvl_2_val = A_lvl_val[A_lvl_q_2]
+                                                        Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val
+                                                        A_lvl_q_2 += 1
+                                                    end
+                                                    break
+                                                end
+                                            end
+                                        end
+                                        A_lvl_q = A_lvl_q_step
+                                        B_lvl_q_2 += 1
+                                    elseif B_lvl_i_2 == phase_stop_9
+                                        B_lvl_q_2 += 1
+                                    elseif A_lvl_i == phase_stop_9
+                                        A_lvl_q = A_lvl_q_step
+                                    end
+                                    k = phase_stop_9 + 1
+                                end
+                            end
+                        end
+                    end
+                end
+                Ct_lvl_2_val = val
+                B_lvl_tbl1 = B_lvl_tbl1_2
+                B_lvl_tbl2 = B_lvl_tbl2_2
+                B_lvl_val = val_2
+                A_lvl_ptr = A_lvl_ptr_2
+                A_lvl_tbl1 = A_lvl_tbl1_2
+                A_lvl_tbl2 = A_lvl_tbl2_2
+                A_lvl_val = val_3
+                B_lvl_q = B_lvl_q_step
+            else
+                phase_stop_18 = min(B_lvl_i, phase_stop)
+                if B_lvl_i == phase_stop_18
+                    Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + phase_stop_18
+                    val_4 = Ct_lvl_2_val
+                    Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads()))
+                    B_lvl_tbl1_3 = B_lvl_tbl1
+                    B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads()))
+                    B_lvl_tbl2_3 = B_lvl_tbl2
+                    val_5 = B_lvl_val
+                    B_lvl_val = (Finch).moveto(B_lvl_val, CPU(Threads.nthreads()))
+                    A_lvl_ptr_3 = A_lvl_ptr
+                    A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads()))
+                    A_lvl_tbl1_3 = A_lvl_tbl1
+                    A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads()))
+                    A_lvl_tbl2_3 = A_lvl_tbl2
+                    A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads()))
+                    val_6 = A_lvl_val
+                    A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads()))
+                    Threads.@threads for i_19 = 1:Threads.nthreads()
+                        phase_start_21 = max(1, 1 + fld(A_lvl.shape[1] * (-1 + i_19), Threads.nthreads()))
+                        phase_stop_23 = min(A_lvl.shape[1], fld(A_lvl.shape[1] * i_19, Threads.nthreads()))
+                        if phase_stop_23 >= phase_start_21
+                            for i_22 = phase_start_21:phase_stop_23
+                                Ct_lvl_2_q_2 = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_22
+                                A_lvl_q = A_lvl_ptr[1]
+                                A_lvl_q_stop = A_lvl_ptr[1 + 1]
+                                if A_lvl_q < A_lvl_q_stop
+                                    A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1]
+                                else
+                                    A_lvl_i_stop = 0
+                                end
+                                B_lvl_q_2 = B_lvl_q
+                                if B_lvl_q < B_lvl_q_step
+                                    B_lvl_i_stop_2 = B_lvl_tbl1[B_lvl_q_step - 1]
+                                else
+                                    B_lvl_i_stop_2 = 0
+                                end
+                                phase_stop_24 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_2)
+                                if phase_stop_24 >= 1
+                                    k = 1
+                                    if A_lvl_tbl2[A_lvl_q] < 1
+                                        A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1)
+                                    end
+                                    if B_lvl_tbl1[B_lvl_q] < 1
+                                        B_lvl_q_2 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1)
+                                    end
+                                    while k <= phase_stop_24
+                                        A_lvl_i = A_lvl_tbl2[A_lvl_q]
+                                        A_lvl_q_step = A_lvl_q
+                                        if A_lvl_tbl2[A_lvl_q] == A_lvl_i
+                                            A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1)
+                                        end
+                                        B_lvl_i_2 = B_lvl_tbl1[B_lvl_q_2]
+                                        phase_stop_25 = min(B_lvl_i_2, A_lvl_i, phase_stop_24)
+                                        if A_lvl_i == phase_stop_25 && B_lvl_i_2 == phase_stop_25
+                                            B_lvl_2_val_3 = B_lvl_val[B_lvl_q_2]
+                                            A_lvl_q_4 = A_lvl_q
+                                            if A_lvl_q < A_lvl_q_step
+                                                A_lvl_i_stop_4 = A_lvl_tbl1[A_lvl_q_step - 1]
+                                            else
+                                                A_lvl_i_stop_4 = 0
+                                            end
+                                            phase_stop_26 = min(i_22, A_lvl_i_stop_4)
+                                            if phase_stop_26 >= i_22
+                                                if A_lvl_tbl1[A_lvl_q] < i_22
+                                                    A_lvl_q_4 = Finch.scansearch(A_lvl_tbl1, i_22, A_lvl_q, A_lvl_q_step - 1)
+                                                end
+                                                while true
+                                                    A_lvl_i_4 = A_lvl_tbl1[A_lvl_q_4]
+                                                    if A_lvl_i_4 < phase_stop_26
+                                                        A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4]
+                                                        Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2
+                                                        A_lvl_q_4 += 1
+                                                    else
+                                                        phase_stop_28 = min(A_lvl_i_4, phase_stop_26)
+                                                        if A_lvl_i_4 == phase_stop_28
+                                                            A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4]
+                                                            Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2
+                                                            A_lvl_q_4 += 1
+                                                        end
+                                                        break
+                                                    end
+                                                end
+                                            end
+                                            A_lvl_q = A_lvl_q_step
+                                            B_lvl_q_2 += 1
+                                        elseif B_lvl_i_2 == phase_stop_25
+                                            B_lvl_q_2 += 1
+                                        elseif A_lvl_i == phase_stop_25
+                                            A_lvl_q = A_lvl_q_step
+                                        end
+                                        k = phase_stop_25 + 1
+                                    end
+                                end
+                            end
+                        end
+                    end
+                    Ct_lvl_2_val = val_4
+                    B_lvl_tbl1 = B_lvl_tbl1_3
+                    B_lvl_tbl2 = B_lvl_tbl2_3
+                    B_lvl_val = val_5
+                    A_lvl_ptr = A_lvl_ptr_3
+                    A_lvl_tbl1 = A_lvl_tbl1_3
+                    A_lvl_tbl2 = A_lvl_tbl2_3
+                    A_lvl_val = val_6
+                    B_lvl_q = B_lvl_q_step
+                end
+                break
+            end
+        end
+    end
+    resize!(Ct_lvl_2_val, A_lvl.shape[1] * B_lvl.shape[2])
+    result = (Ct = Tensor((DenseLevel){Int64}((DenseLevel){Int64}(Ct_lvl_3, A_lvl.shape[1]), B_lvl.shape[2])),)
+    result
+end
+julia> @finch begin
+           Ct .= 0
+           for j = _
+               for i = parallel(_)
+                   for k = _
+                       Ct[i, j] += A[i, k] * B[k, j]
+                   end
+               end
+           end
+       end
+(Ct = Tensor(Dense{Int64}(Dense{Int64}(Element{0.0, Float64, Int64}([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.70856859734722, 0.0, 0.0, 0.0, 0.0, 0.40683416426697433, 0.0, 0.0, 0.5116135014596547, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11665081191597346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6815260724911955, 0.0, 0.012792585308225086, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23414421666297028, 0.0, 0.049360715516608075, 0.0, 0.17114666211685992, 0.0, 0.0, 0.016462013473269064, 0.10889740119368015, 0.0, 0.0, 0.0, 0.00577445736315422, 0.03699291283586617, 0.0, 0.054643694670994745, 0.0, 0.0, 0.0, 0.08011353665096754, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1586425298490503, 0.5528778968703049, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22133004329744424, 0.198518145547811, 0.038821202773921835, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18278114382714625, 0.1962641247777959, 0.33892974018746314, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.013014677888550407, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6923776912647246, 0.0, 0.0, 0.0, 0.0, 0.03372992996786747, 0.0, 0.0, 0.0, 0.16613962476105457, 0.0, 0.21040813690603985, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12848588404681333, 0.44877210624067243, 0.0, 0.0, 0.00922143015221846, 0.0, 0.334367843457963, 0.0, 0.3344728737871945, 0.0, 0.007609184274463018,
0.0, 0.0783652603407444, 0.0, 0.008880000989208168, 0.6489339658812214, 0.0, 0.0, 0.4325454998052285, 0.24409841671258664, 0.0, 0.5857475361200408, 0.0, 0.006543890265716072, 0.0, 0.0, 0.0, 0.008425346810589148, 0.0, 0.0, 0.16622566484891482, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27228959440031625, 0.16863240701914226, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16867493346874554, 0.42330165278515836, 0.0, 0.0, 0.6457463733709866, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12409061480277174, 0.0, 0.14869580809977814, 0.0, 0.0, 0.0, 0.0, 0.14872661472502066, 0.5062551989716859, 0.06509123699478471, 0.0, 0.026886171078575067, 0.0, 0.3503937118835625, 0.0, 0.13397275360612415, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07224002362266076, 0.0, 0.0, 0.014356019852976252, 0.2658719803791635, 0.13166323290951196, 0.0, 0.0, 0.0, 0.0, 0.020308821426184575, 0.025635708510132263, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007980130035150811, 0.4090538905999397, 0.0, 0.0, 0.008918289569362796, 0.31893009912431536, 0.0, 0.0, 0.0, 0.009525001505142072, 0.0, 0.0, 0.013278407346240287, 0.021967736559129595, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12136922915980554, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.279484838006678, 0.2506790805406071, 0.0, 0.0, 0.10182245842840729, 0.0, 0.0, 0.5745756063332743, 0.0, 0.5180568530499269, 0.0, 0.19018853437509187, 0.2984452378932974, 0.0, 0.0, 0.0, 0.2447378349547936, 0.0, 0.48522550177156104, 0.5906428332019245, 0.0, 0.0, 0.756905839792281, 0.0, 0.4128008966782496, 0.7173389246941403, 0.0, 0.4558370427008323, 0.6596592042653375, 0.33014948101925834, 0.0, 0.0, 0.0, 0.0, 0.17629318543732997, 0.5676926328184286, 0.6751757284159318, 0.18305092512120225, 0.0, 0.5526511646802545, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6816784313112039, 0.0, 0.0, 0.0, 0.0, 0.4384965381282249, 0.0, 0.5249476708016299, 0.5514304573066787, 0.0, 0.0, 0.0278361825874964, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.38619300372291254, 0.0, 0.5997636810868653, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20257599957103595, 0.0, 0.10844521538627999, 0.0, 1.2226089817686585, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04229463432907405, 0.0, 0.0, 0.0, 0.0, 0.19946965003853376, 0.048044013732540566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.036066754029278635, 0.0, 0.16231330637401103, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6062852141104628, 0.40481276616279827, 0.0, 0.0, 0.2539845497787725, 0.0, 0.0, 0.5691049256163461, 0.572636917198234, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.026986118300396385, 0.0, 0.0, 0.2578776387682179, 0.2201878278780288, 0.3521973225117564, 0.49204059164875263, 0.20695213801377368, 0.3889153033164169, 0.12024801465448598, 0.5015222202004627, 0.0, 0.0, 0.0, 0.25511838171251733, 0.3395573400007252, 0.0, 0.14076018335068016, 0.006079069440287111, 0.3120090695218348, 0.02735798901473654, 0.23564550369089346, 0.0, 0.5201790908279286, 0.42693711336544043, 0.0, 0.0, 0.32942094250755716, 0.0, 0.0, 0.0, 0.0, 0.42208957099917227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27253622974111436, 0.20091824181940837, 0.0, 0.0, 0.014451665708310772, 0.0648234167882922, 0.0, 0.0, 0.0, 0.0, 0.43998592755822313, 
0.200499194569837, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10517105271620596, 0.4525079677294799, 0.0, 0.0, 0.0, 0.3992933674102322, 0.0, 0.6336315500314595, 0.5683248347075297, 0.0, 0.03911423543998137, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5618719435725092, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13717215169360888, 0.6023639264726295, 0.0, 0.16451246906039718, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2428824655720991, 0.0, 0.0, 0.0, 0.1307489669067023, 0.016490708613327746, 0.0, 0.5332965109179645, 0.0, 0.72835164827528, 0.5456771996801212, 0.20306669911566266, 0.8042852233793947, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08986996958215537, 0.0, 0.09372773893140769, 0.2910950907401526, 0.0, 0.0, 0.11400417325056827, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15073719147466486, 0.0, 0.0062702773417321524, 0.0, 0.11018067217476075, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03517847753788876, 0.0, 0.0, 0.0, 0.0, 0.0, 0.012565073058016527, 0.0, 0.013511533493035587, 0.0, 0.0, 0.0, 0.1160032767395455, 0.3288776375406247, 0.009384967607632352, 0.013704851364696426, 0.0, 0.0, 0.0, 0.10679904950368906, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5396299966121684, 0.0, 0.0, 0.0, 0.4120885163537877, 0.0, 0.0, 0.0, 0.0, 0.1374333138690627, 0.0, 0.0, 0.0, 0.0, 0.0, 0.308835769631579, 0.0, 0.0, 0.2935183046180839, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21641442424094412, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16667427577804808, 0.0, 0.0, 0.0, 0.0, 0.1072149118509185, 0.0, 0.0, 0.13482790109225204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32702264908263307, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17999984836884408, 0.0, 0.0, 0.0, 0.0, 0.17960575635997003, 0.0, 0.0, 0.0, 0.0, 0.08691531027429766, 0.0, 0.2893303365574358, 0.0, 0.21730094701941294, 0.07940443022930238, 0.0, 0.0, 0.01060094130869171, 0.0, 0.0, 0.21897146914670435, 0.3193413479843402, 0.11502306512986679, 0.16078084781731777, 0.0, 0.0, 0.029138065953053547, 0.12899773650646262, 0.0, 0.0, 0.0, 0.0, 0.35209003333845906, 0.0, 0.0, 0.42881832168904727, 0.0, 0.07551665376676242, 0.0, 0.10828996414360535, 0.0, 0.0, 0.0, 0.06175478190339224, 0.07317612880730584, 0.0, 0.4573326969055594, 0.13616590004764864, 0.1733951270114995, 0.0, 0.0, 0.0, 0.0, 0.004682414366264115, 0.0, 0.0, 0.014457116882680743, 0.477538324169661, 0.6690522150262886, 0.0, 0.0, 0.35074798851029604, 0.0, 0.6171788496488323, 0.0, 0.0, 0.0, 0.4598571719367441, 0.020424541646230095, 0.5014147387061915, 0.0, 0.0, 0.18767693158032267, 0.0, 0.08703683411330847, 0.0, 0.3388800152965881, 0.004390191394809013, 0.2131890539109787, 0.0, 0.43631258167790793, 0.0, 0.0, 0.0, 0.0, 0.011243656021000366, 0.0, 0.0, 0.004302223899340062, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3252052607129126, 0.10943362605720088, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4600533876598886, 0.0, 0.0, 0.0, 0.6238654988124779, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17327928789102656, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3007942289792172, 0.49763260068933673, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1811653464990684, 0.0, 0.7596663692737102, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24115083978451807, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4132016125575898, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.30212649550849624, 0.45064666528381636, 0.0, 0.0, 1.0295831107021076, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4924133218118872, 0.0, 0.0, 
0.6123145833841932, 1.0074416855456372, 0.8974627446983252, 0.0, 0.0, 0.39387754214687604, 0.6443411524082342, 0.0, 0.6700619016074552, 0.0, 0.0, 0.4377096051189058, 0.3977735630344448, 0.0, 0.0, 0.0, 0.054395363429349475, 0.46912699721752377, 0.09054692381782561, 0.0, 0.6320539215802606, 0.33393006705956935, 0.614294006786966, 0.0, 0.0, 0.3986581318369477, 0.630705395059908, 0.0, 0.4266820903955834, 0.6230830913144384, 0.0, 1.0831760350408295, 0.21088758697054613, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7135848094649876, 0.06891111903866448, 0.0, 0.0, 0.0, 0.32920907856114856, 0.5438621275845595, 0.0, 0.4139962279186794, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13809169802867077, 0.0, 0.14849341459532064, 0.0, 0.0, 0.0, 0.5533499952652239, 0.0, 0.10314194807289108, 0.15061800177708015, 0.0, 0.5514890096422472, 0.23190068111070972, 0.0, 0.701822721136001, 0.6294877235260824, 0.0, 0.0, 0.0, 1.1059687974440209, 0.0, 0.0, 0.0, 0.96169924491107, 0.0, 0.0, 0.0, 0.0, 0.06628312350990566, 0.0, 0.37743722057381907, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08765206247905213, 0.0, 0.5218343219200703, 0.0, 0.0, 0.0, 0.41461579287108996, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6671900885778136, 0.0, 0.0, 0.0, 0.028262864260027967, 0.7909490968152504, 0.04511006432931489, 0.0, 0.0, 0.0, 0.4564818142302977, 0.35113439772465294, 0.0, 0.0, 0.0, 0.03776705986522304, 0.6129906961879193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42034814190539166, 0.0, 0.0, 0.21638060933821013, 0.0, 0.0, 0.0, 0.2526638526129318, 0.03715346614099295, 0.6358424325058142, 0.0, 0.0, 0.04152129953915335, 0.04614236094065058, 0.0, 0.0, 0.07015688762319212, 0.05740172594394938, 0.18351207696535793, 0.0, 0.0, 0.008864400691370657, 0.0, 0.8651249823989057, 0.407678395491645, 0.845679782901505, 0.0, 0.0, 0.0, 0.0, 0.4753420456230014, 1.6107190368773618, 0.0, 0.0, 0.0638102435424578, 0.24041718723542044, 0.5177820553003522, 0.44619181405158337, 0.4489609795607919, 0.0, 0.07301898938869086, 0.0902695074635508, 0.4268753686592877, 0.0, 0.0, 0.0, 0.16408594940133228, 0.0, 0.0, 0.6736439849461895, 0.031382854877976686, 0.034000045192845295, 0.0, 0.0, 0.8630676095443404, 0.3932053636261741, 0.0, 0.0, 0.0, 0.23747714078691665, 0.2179752640521007, 0.0, 0.059020425946533776, 0.23507519209688904, 0.0, 0.6184935143504988, 0.18475168643298573, 0.3255150538360457, 0.0, 0.0, 0.10559996237493433, 0.258855010743052, 0.0, 0.0, 0.0, 0.0, 0.5931453366617909, 0.0, 0.012879882724963071, 0.34456425817306313, 0.0, 0.10363371305008902, 0.0, 0.0, 0.13032437159832005, 0.0, 0.0, 0.9136517678658531, 0.11645026631438779, 0.0, 0.0, 1.006195673838974, 0.0, 0.49183662858528426, 0.5242175629103283, 0.0, 0.0, 0.5842253265103485, 0.09175087532086124, 0.0, 0.0, 0.6346229344595797, 0.8483440750859271, 0.0, 0.0, 0.43168828228976236, 1.1746392457903614, 0.9786548179849323, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22466432888463708, 0.0, 0.0, 0.23383689086971213, 0.07868759200794351, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5047067971651431, 0.0, 0.04466167239654772, 0.0, 0.18031282035336027, 0.06880056169195574, 0.0, 0.039760598424459205, 0.2052193360923158, 0.0, 0.1245954319549744, 0.0, 0.0, 0.3371869924989234, 0.10076818165067654, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21628428501390357, 0.41878760793751535, 0.0, 0.2743772579349106, 0.0, 0.0, 0.0, 0.23737336558608516, 0.0, 0.5609891075534331, 0.0, 0.0, 0.0, 0.0, 0.11206304892649907, 0.03770992438488979, 0.0, 0.0, 0.17383517173220803, 0.252655932334268, 0.0, 0.1585306005724301, 0.0, 0.1865298137691172, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.05971061253533409, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15913149160221682, 0.0, 0.4030910369398146, 0.0, 0.32022434202130456, 0.23687245172165972, 0.0, 0.3599325265793679, 0.0, 0.4765964362382492, 0.0, 0.0, 0.0, 0.0, 0.6107120779202261, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.573260313114729, 0.5768180939544553, 0.0, 0.17499066339826644, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7201822598869417, 0.0, 0.0, 0.0, 0.0, 0.08725982492406978, 0.5051841445138487, 0.0, 0.0, 0.0, 0.2569811590076317, 0.3964028116744446, 0.0, 0.0, 0.0, 0.0, 0.0, 0.31198143094865527, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42812611727229755, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.40187139064927635, 0.40436549376609193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5533450612442088, 0.3541481069684905, 0.0, 0.0, 0.0, 0.1801509251180122, 0.0, 0.0, 0.0, 0.08899148634361333, 0.0, 0.4004935508153472, 0.1664002225353232, 0.6660561411338942, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5274372110974553, 0.0, 0.0, 0.11745018036413006, 0.03952272816910389, 0.654574176319042, 0.0, 0.0, 0.0, 0.0, 0.29642163335908794, 0.1274499525156547, 0.03345490004078419, 0.0, 0.0, 0.07653915037041417, 0.0, 0.02978363268713748, 0.15372448023095572, 0.03967384765850083, 0.06258104057589436, 0.0, 0.0, 0.044337981986606795, 0.07548283043416787, 0.0, 0.0, 0.0, 0.04735429836324166, 0.0, 0.0, 0.10863396357319674, 0.1797235339241024, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2053113311622715, 0.0, 0.0, 0.6339058613302375, 0.26881844559242324, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.047384735026192754, 0.0, 0.09394657688969027, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.41199723535819494, 0.0, 0.0, 0.19249813639294353, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4930042077083581, 0.01569004651529754, 0.0, 0.1886409972802957, 0.0, 0.6504613027483894, 0.0, 0.0, 0.0, 0.4491308783325726, 0.0, 0.710600063326777, 0.0, 0.0, 0.0, 0.0, 0.6392481732081897, 0.0, 0.0, 0.21075790790635565, 0.143953089400151, 0.04517962123405222, 0.12446587761719602, 0.011602660433066613, 0.0, 0.021141064793252704, 0.017873704948355638, 0.0, 0.010329409925325224, 0.21730544865000528, 0.03874493610862431, 0.0, 0.0, 0.0, 0.04329986579681618, 0.02617857620219232, 0.0, 0.0, 0.14198488087669728, 0.04624555904799935, 0.33624630483183143, 0.0, 0.18065873234459212, 0.001167765010724762, 0.0, 0.41156050949339695, 0.06138103351330629, 0.0, 0.0, 0.0, 0.0, 0.0, 0.45223330011890606, 0.0, 0.42719481956226096, 0.0, 0.09390028378745956, 0.03159803909732349, 0.0, 0.0, 0.0, 0.14464053921535813, 0.0, 0.355579453624945, 0.27582314245244144, 0.0, 0.0, 0.4779189904559178, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6540512683692249, 0.16386729047093346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08595593354654336, 0.0868518036911852, 0.1436872279500763, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08976911428083846, 0.0, 0.0, 0.0, 0.06561633035108058, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0209499775034313, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06082234101182034, 0.19585779687130894, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01528743323594608, 0.0, 0.0, 0.0, 0.011174280569280867, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3922416239543676, 0.0, 0.5308327922857218, 0.38811106643298066, 0.0, 0.5861742359361938, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.010357877371534024, 0.03335404406511716, 0.2121541431315042, 0.0, 0.0, 0.0, 0.0, 0.0, 0.022181366862053645, 0.0, 0.0, 0.3232792127186002, 0.21780290259329063, 0.0, 0.0, 0.6724740217065875, 0.33454771410797746, 0.0, 0.0, 0.0, 0.0, 
0.1517011050211833, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20421012621580428, 0.0, 0.0, 0.0, 0.09554663093280256, 0.0, 0.9066194809744413, 0.0, 0.6530356828494477, 0.0, 0.0, 0.4162310356447254, 0.2004622291406719, 0.0, 0.06531630990369706, 0.0, 0.0, 0.22516333873266767, 0.08751639468488193, 0.31273858097846424, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10565952264223451, 0.25213753900728575, 0.0, 0.13605306731422323, 0.0, 0.0, 0.20467409186458246, 0.0, 0.0, 0.0, 0.14062561725182204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06654804138060808, 0.04780210989364991, 0.3312858803709398, 0.0, 0.09056986573031543, 0.0, 0.0, 0.22425218794100035, 0.6978010919630194, 0.5644822411311934, 0.0, 0.0, 0.0, 0.0, 0.25245963204466193, 0.5714387205668741, 0.0, 0.7794778768245345, 0.5229243836029062, 0.6068223768633771, 0.0, 0.0, 0.3439063861094934, 0.0, 0.5825998753106167, 0.07738278703840655, 0.0, 0.0, 0.45088731313609587, 0.020026145653267647, 0.49371220470576765, 0.0, 0.1967497337446331, 0.9333687888834381, 0.2687116640308751, 0.3109501951809905, 0.0, 0.628995837333548, 0.6431918870734405, 0.0, 0.0, 0.4278019777568812, 0.0, 0.0, 0.3426634565816645, 0.0, 0.7642079896603969, 0.15720215492969136, 0.0, 0.5627301734522071, 0.0, 0.0, 0.03992028432745955, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8902694418730629, 0.24785210085431258, 0.0, 0.0, 0.0, 0.19750384789873962, 0.0, 0.0, 0.24837057467209334, 0.0, 0.0, 0.06196917660673224, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9196979517908253, 0.47523150409994275, 0.0, 0.05084636736867684, 0.30281629383729847, 0.0, 0.0, 0.0, 0.0, 0.7797375130571516, 0.0, 0.0, 0.009193696508787878, 0.7164774465476489, 0.37223214885126943, 0.36077619715591613]), 42), 42)),) +julia> @finch_code begin + Ct .= 0 + for j = parallel(_) + for i = parallel(_) + for k = _ + Ct[i, j] += A[i, k] * B[k, j] + end + end + end + end +quote + Ct_lvl = ((ex.bodies[1]).bodies[1]).tns.bind.lvl + Ct_lvl_2 = Ct_lvl.lvl + Ct_lvl_3 = Ct_lvl_2.lvl + Ct_lvl_2_val = Ct_lvl_2.lvl.val + A_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl + A_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.ptr + A_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[1] + A_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[1]).tns.bind.lvl.tbl[2] + A_lvl_val = A_lvl.lvl.val + B_lvl = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl + B_lvl_ptr = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.ptr + B_lvl_tbl1 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[1] + B_lvl_tbl2 = (((ex.bodies[1]).bodies[2]).body.body.body.rhs.args[2]).tns.bind.lvl.tbl[2] + B_lvl_val = B_lvl.lvl.val + B_lvl.shape[1] == A_lvl.shape[2] || throw(DimensionMismatch("mismatched dimension limits ($(B_lvl.shape[1]) != $(A_lvl.shape[2]))")) + @warn "Performance Warning: non-concordant traversal of A[i, k] (hint: most arrays prefer column major or first index fast, run in fast mode to ignore this warning)" + result = nothing + pos_stop = A_lvl.shape[1] * B_lvl.shape[2] + Finch.resize_if_smaller!(Ct_lvl_2_val, pos_stop) + Finch.fill_range!(Ct_lvl_2_val, 0.0, 1, pos_stop) + val = Ct_lvl_2_val + Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads())) + B_lvl_ptr = (Finch).moveto(B_lvl_ptr, CPU(Threads.nthreads())) + B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads())) + B_lvl_tbl2 = (Finch).moveto(B_lvl_tbl2, CPU(Threads.nthreads())) + B_lvl_val = 
(Finch).moveto(B_lvl_val, CPU(Threads.nthreads())) + A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads())) + A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads())) + A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads())) + A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads())) + Threads.@threads for i_4 = 1:Threads.nthreads() + B_lvl_q = B_lvl_ptr[1] + B_lvl_q_stop = B_lvl_ptr[1 + 1] + if B_lvl_q < B_lvl_q_stop + B_lvl_i_stop = B_lvl_tbl2[B_lvl_q_stop - 1] + else + B_lvl_i_stop = 0 + end + phase_start_2 = max(1, 1 + fld(B_lvl.shape[2] * (i_4 + -1), Threads.nthreads())) + phase_stop_2 = min(B_lvl.shape[2], B_lvl_i_stop, fld(B_lvl.shape[2] * i_4, Threads.nthreads())) + if phase_stop_2 >= phase_start_2 + if B_lvl_tbl2[B_lvl_q] < phase_start_2 + B_lvl_q = Finch.scansearch(B_lvl_tbl2, phase_start_2, B_lvl_q, B_lvl_q_stop - 1) + end + while true + B_lvl_i = B_lvl_tbl2[B_lvl_q] + B_lvl_q_step = B_lvl_q + if B_lvl_tbl2[B_lvl_q] == B_lvl_i + B_lvl_q_step = Finch.scansearch(B_lvl_tbl2, B_lvl_i + 1, B_lvl_q, B_lvl_q_stop - 1) + end + if B_lvl_i < phase_stop_2 + Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + B_lvl_i + val_4 = Ct_lvl_2_val + Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads())) + A_lvl_ptr_3 = A_lvl_ptr + A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads())) + A_lvl_tbl1_3 = A_lvl_tbl1 + A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads())) + A_lvl_tbl2_3 = A_lvl_tbl2 + A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads())) + val_5 = A_lvl_val + A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads())) + B_lvl_ptr_3 = B_lvl_ptr + B_lvl_tbl1_3 = B_lvl_tbl1 + B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads())) + B_lvl_tbl2_3 = B_lvl_tbl2 + val_6 = B_lvl_val + B_lvl_val = (Finch).moveto(B_lvl_val, CPU(Threads.nthreads())) + Threads.@threads for i_10 = 1:Threads.nthreads() + phase_start_7 = max(1, 1 + fld(A_lvl.shape[1] * (-1 + i_10), Threads.nthreads())) + phase_stop_8 = min(A_lvl.shape[1], fld(A_lvl.shape[1] * i_10, Threads.nthreads())) + if phase_stop_8 >= phase_start_7 + for i_13 = phase_start_7:phase_stop_8 + Ct_lvl_2_q = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_13 + A_lvl_q = A_lvl_ptr[1] + A_lvl_q_stop = A_lvl_ptr[1 + 1] + if A_lvl_q < A_lvl_q_stop + A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1] + else + A_lvl_i_stop = 0 + end + B_lvl_q_3 = B_lvl_q + if B_lvl_q < B_lvl_q_step + B_lvl_i_stop_3 = B_lvl_tbl1[B_lvl_q_step - 1] + else + B_lvl_i_stop_3 = 0 + end + phase_stop_9 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_3) + if phase_stop_9 >= 1 + k = 1 + if A_lvl_tbl2[A_lvl_q] < 1 + A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1) + end + if B_lvl_tbl1[B_lvl_q] < 1 + B_lvl_q_3 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1) + end + while k <= phase_stop_9 + A_lvl_i = A_lvl_tbl2[A_lvl_q] + A_lvl_q_step = A_lvl_q + if A_lvl_tbl2[A_lvl_q] == A_lvl_i + A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1) + end + B_lvl_i_3 = B_lvl_tbl1[B_lvl_q_3] + phase_stop_10 = min(B_lvl_i_3, phase_stop_9, A_lvl_i) + if A_lvl_i == phase_stop_10 && B_lvl_i_3 == phase_stop_10 + B_lvl_2_val = B_lvl_val[B_lvl_q_3] + A_lvl_q_2 = A_lvl_q + if A_lvl_q < A_lvl_q_step + A_lvl_i_stop_2 = A_lvl_tbl1[A_lvl_q_step - 1] + else + A_lvl_i_stop_2 = 0 + end + phase_stop_11 = min(i_13, A_lvl_i_stop_2) + if phase_stop_11 >= i_13 + if A_lvl_tbl1[A_lvl_q] < i_13 + A_lvl_q_2 = Finch.scansearch(A_lvl_tbl1, i_13, A_lvl_q, A_lvl_q_step - 1) + end + while true + A_lvl_i_2 = 
A_lvl_tbl1[A_lvl_q_2] + if A_lvl_i_2 < phase_stop_11 + A_lvl_2_val = A_lvl_val[A_lvl_q_2] + Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val + A_lvl_q_2 += 1 + else + phase_stop_13 = min(A_lvl_i_2, phase_stop_11) + if A_lvl_i_2 == phase_stop_13 + A_lvl_2_val = A_lvl_val[A_lvl_q_2] + Ct_lvl_2_val[Ct_lvl_2_q] += B_lvl_2_val * A_lvl_2_val + A_lvl_q_2 += 1 + end + break + end + end + end + A_lvl_q = A_lvl_q_step + B_lvl_q_3 += 1 + elseif B_lvl_i_3 == phase_stop_10 + B_lvl_q_3 += 1 + elseif A_lvl_i == phase_stop_10 + A_lvl_q = A_lvl_q_step + end + k = phase_stop_10 + 1 + end + end + end + end + end + Ct_lvl_2_val = val_4 + A_lvl_ptr = A_lvl_ptr_3 + A_lvl_tbl1 = A_lvl_tbl1_3 + A_lvl_tbl2 = A_lvl_tbl2_3 + A_lvl_val = val_5 + B_lvl_ptr = B_lvl_ptr_3 + B_lvl_tbl1 = B_lvl_tbl1_3 + B_lvl_tbl2 = B_lvl_tbl2_3 + B_lvl_val = val_6 + B_lvl_q = B_lvl_q_step + else + phase_stop_19 = min(B_lvl_i, phase_stop_2) + if B_lvl_i == phase_stop_19 + Ct_lvl_q = (1 - 1) * B_lvl.shape[2] + phase_stop_19 + val_7 = Ct_lvl_2_val + Ct_lvl_2_val = (Finch).moveto(Ct_lvl_2_val, CPU(Threads.nthreads())) + A_lvl_ptr_4 = A_lvl_ptr + A_lvl_ptr = (Finch).moveto(A_lvl_ptr, CPU(Threads.nthreads())) + A_lvl_tbl1_4 = A_lvl_tbl1 + A_lvl_tbl1 = (Finch).moveto(A_lvl_tbl1, CPU(Threads.nthreads())) + A_lvl_tbl2_4 = A_lvl_tbl2 + A_lvl_tbl2 = (Finch).moveto(A_lvl_tbl2, CPU(Threads.nthreads())) + val_8 = A_lvl_val + A_lvl_val = (Finch).moveto(A_lvl_val, CPU(Threads.nthreads())) + B_lvl_ptr_4 = B_lvl_ptr + B_lvl_tbl1_4 = B_lvl_tbl1 + B_lvl_tbl1 = (Finch).moveto(B_lvl_tbl1, CPU(Threads.nthreads())) + B_lvl_tbl2_4 = B_lvl_tbl2 + val_9 = B_lvl_val + B_lvl_val = (Finch).moveto(B_lvl_val, CPU(Threads.nthreads())) + Threads.@threads for i_20 = 1:Threads.nthreads() + phase_start_22 = max(1, 1 + fld(A_lvl.shape[1] * (-1 + i_20), Threads.nthreads())) + phase_stop_24 = min(A_lvl.shape[1], fld(A_lvl.shape[1] * i_20, Threads.nthreads())) + if phase_stop_24 >= phase_start_22 + for i_23 = phase_start_22:phase_stop_24 + Ct_lvl_2_q_2 = (Ct_lvl_q - 1) * A_lvl.shape[1] + i_23 + A_lvl_q = A_lvl_ptr[1] + A_lvl_q_stop = A_lvl_ptr[1 + 1] + if A_lvl_q < A_lvl_q_stop + A_lvl_i_stop = A_lvl_tbl2[A_lvl_q_stop - 1] + else + A_lvl_i_stop = 0 + end + B_lvl_q_3 = B_lvl_q + if B_lvl_q < B_lvl_q_step + B_lvl_i_stop_3 = B_lvl_tbl1[B_lvl_q_step - 1] + else + B_lvl_i_stop_3 = 0 + end + phase_stop_25 = min(B_lvl.shape[1], A_lvl_i_stop, B_lvl_i_stop_3) + if phase_stop_25 >= 1 + k = 1 + if A_lvl_tbl2[A_lvl_q] < 1 + A_lvl_q = Finch.scansearch(A_lvl_tbl2, 1, A_lvl_q, A_lvl_q_stop - 1) + end + if B_lvl_tbl1[B_lvl_q] < 1 + B_lvl_q_3 = Finch.scansearch(B_lvl_tbl1, 1, B_lvl_q, B_lvl_q_step - 1) + end + while k <= phase_stop_25 + A_lvl_i = A_lvl_tbl2[A_lvl_q] + A_lvl_q_step = A_lvl_q + if A_lvl_tbl2[A_lvl_q] == A_lvl_i + A_lvl_q_step = Finch.scansearch(A_lvl_tbl2, A_lvl_i + 1, A_lvl_q, A_lvl_q_stop - 1) + end + B_lvl_i_3 = B_lvl_tbl1[B_lvl_q_3] + phase_stop_26 = min(B_lvl_i_3, A_lvl_i, phase_stop_25) + if A_lvl_i == phase_stop_26 && B_lvl_i_3 == phase_stop_26 + B_lvl_2_val_3 = B_lvl_val[B_lvl_q_3] + A_lvl_q_4 = A_lvl_q + if A_lvl_q < A_lvl_q_step + A_lvl_i_stop_4 = A_lvl_tbl1[A_lvl_q_step - 1] + else + A_lvl_i_stop_4 = 0 + end + phase_stop_27 = min(i_23, A_lvl_i_stop_4) + if phase_stop_27 >= i_23 + if A_lvl_tbl1[A_lvl_q] < i_23 + A_lvl_q_4 = Finch.scansearch(A_lvl_tbl1, i_23, A_lvl_q, A_lvl_q_step - 1) + end + while true + A_lvl_i_4 = A_lvl_tbl1[A_lvl_q_4] + if A_lvl_i_4 < phase_stop_27 + A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4] + Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * 
A_lvl_2_val_2 + A_lvl_q_4 += 1 + else + phase_stop_29 = min(A_lvl_i_4, phase_stop_27) + if A_lvl_i_4 == phase_stop_29 + A_lvl_2_val_2 = A_lvl_val[A_lvl_q_4] + Ct_lvl_2_val[Ct_lvl_2_q_2] += B_lvl_2_val_3 * A_lvl_2_val_2 + A_lvl_q_4 += 1 + end + break + end + end + end + A_lvl_q = A_lvl_q_step + B_lvl_q_3 += 1 + elseif B_lvl_i_3 == phase_stop_26 + B_lvl_q_3 += 1 + elseif A_lvl_i == phase_stop_26 + A_lvl_q = A_lvl_q_step + end + k = phase_stop_26 + 1 + end + end + end + end + end + Ct_lvl_2_val = val_7 + A_lvl_ptr = A_lvl_ptr_4 + A_lvl_tbl1 = A_lvl_tbl1_4 + A_lvl_tbl2 = A_lvl_tbl2_4 + A_lvl_val = val_8 + B_lvl_ptr = B_lvl_ptr_4 + B_lvl_tbl1 = B_lvl_tbl1_4 + B_lvl_tbl2 = B_lvl_tbl2_4 + B_lvl_val = val_9 + B_lvl_q = B_lvl_q_step + end + break + end + end + end + end + resize!(val, A_lvl.shape[1] * B_lvl.shape[2]) + result = (Ct = Tensor((DenseLevel){Int64}((DenseLevel){Int64}(Ct_lvl_3, A_lvl.shape[1]), B_lvl.shape[2])),) + result +end +julia> @finch begin + Ct .= 0 + for j = parallel(_) + for i = parallel(_) + for k = _ + Ct[i, j] += A[i, k] * B[k, j] + end + end + end + end +(Ct = Tensor(Dense{Int64}(Dense{Int64}(Element{0.0, Float64, Int64}([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.70856859734722, 0.0, 0.0, 0.0, 0.0, 0.40683416426697433, 0.0, 0.0, 0.5116135014596547, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11665081191597346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6815260724911955, 0.0, 0.012792585308225086, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23414421666297028, 0.0, 0.049360715516608075, 0.0, 0.17114666211685992, 0.0, 0.0, 0.016462013473269064, 0.10889740119368015, 0.0, 0.0, 0.0, 0.00577445736315422, 0.03699291283586617, 0.0, 0.054643694670994745, 0.0, 0.0, 0.0, 0.08011353665096754, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1586425298490503, 0.5528778968703049, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22133004329744424, 0.198518145547811, 0.038821202773921835, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18278114382714625, 0.1962641247777959, 0.33892974018746314, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.013014677888550407, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6923776912647246, 0.0, 0.0, 0.0, 0.0, 0.03372992996786747, 0.0, 0.0, 0.0, 0.16613962476105457, 0.0, 0.21040813690603985, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12848588404681333, 0.44877210624067243, 0.0, 0.0, 0.00922143015221846, 0.0, 0.334367843457963, 0.0, 0.3344728737871945, 0.0, 0.007609184274463018, 0.0, 0.0783652603407444, 0.0, 0.008880000989208168, 0.6489339658812214, 0.0, 0.0, 0.4325454998052285, 0.24409841671258664, 0.0, 0.5857475361200408, 0.0, 0.006543890265716072, 0.0, 0.0, 0.0, 0.008425346810589148, 0.0, 0.0, 0.16622566484891482, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27228959440031625, 0.16863240701914226, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16867493346874554, 0.42330165278515836, 0.0, 0.0, 0.6457463733709866, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12409061480277174, 0.0, 0.14869580809977814, 0.0, 0.0, 0.0, 0.0, 0.14872661472502066, 0.5062551989716859, 0.06509123699478471, 0.0, 0.026886171078575067, 0.0, 0.3503937118835625, 0.0, 0.13397275360612415, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07224002362266076, 0.0, 0.0, 0.014356019852976252, 0.2658719803791635, 0.13166323290951196, 0.0, 0.0, 0.0, 0.0, 0.020308821426184575, 0.025635708510132263, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007980130035150811, 0.4090538905999397, 0.0, 0.0, 0.008918289569362796, 0.31893009912431536, 0.0, 0.0, 0.0, 0.009525001505142072, 0.0, 0.0, 0.013278407346240287, 0.021967736559129595, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12136922915980554, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.279484838006678, 0.2506790805406071, 0.0, 0.0, 0.10182245842840729, 0.0, 0.0, 0.5745756063332743, 0.0, 0.5180568530499269, 0.0, 0.19018853437509187, 0.2984452378932974, 0.0, 0.0, 0.0, 0.2447378349547936, 0.0, 0.48522550177156104, 0.5906428332019245, 0.0, 0.0, 0.756905839792281, 0.0, 0.4128008966782496, 0.7173389246941403, 0.0, 0.4558370427008323, 0.6596592042653375, 0.33014948101925834, 0.0, 0.0, 0.0, 0.0, 0.17629318543732997, 0.5676926328184286, 0.6751757284159318, 0.18305092512120225, 0.0, 0.5526511646802545, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6816784313112039, 0.0, 0.0, 0.0, 0.0, 0.4384965381282249, 0.0, 0.5249476708016299, 0.5514304573066787, 0.0, 0.0, 0.0278361825874964, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.38619300372291254, 0.0, 0.5997636810868653, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20257599957103595, 0.0, 0.10844521538627999, 0.0, 1.2226089817686585, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04229463432907405, 0.0, 0.0, 0.0, 0.0, 0.19946965003853376, 0.048044013732540566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.036066754029278635, 0.0, 0.16231330637401103, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6062852141104628, 0.40481276616279827, 0.0, 0.0, 0.2539845497787725, 0.0, 0.0, 0.5691049256163461, 0.572636917198234, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.026986118300396385, 0.0, 0.0, 0.2578776387682179, 0.2201878278780288, 0.3521973225117564, 0.49204059164875263, 0.20695213801377368, 0.3889153033164169, 0.12024801465448598, 0.5015222202004627, 0.0, 0.0, 0.0, 0.25511838171251733, 0.3395573400007252, 0.0, 0.14076018335068016, 0.006079069440287111, 0.3120090695218348, 0.02735798901473654, 0.23564550369089346, 0.0, 0.5201790908279286, 0.42693711336544043, 0.0, 0.0, 0.32942094250755716, 0.0, 0.0, 0.0, 0.0, 0.42208957099917227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27253622974111436, 0.20091824181940837, 0.0, 0.0, 0.014451665708310772, 0.0648234167882922, 0.0, 0.0, 0.0, 0.0, 0.43998592755822313, 0.200499194569837, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10517105271620596, 0.4525079677294799, 0.0, 0.0, 0.0, 0.3992933674102322, 0.0, 0.6336315500314595, 0.5683248347075297, 0.0, 0.03911423543998137, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5618719435725092, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13717215169360888, 0.6023639264726295, 0.0, 0.16451246906039718, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2428824655720991, 0.0, 0.0, 0.0, 0.1307489669067023, 0.016490708613327746, 0.0, 0.5332965109179645, 0.0, 0.72835164827528, 0.5456771996801212, 0.20306669911566266, 0.8042852233793947, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08986996958215537, 0.0, 0.09372773893140769, 0.2910950907401526, 0.0, 0.0, 0.11400417325056827, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15073719147466486, 0.0, 0.0062702773417321524, 0.0, 0.11018067217476075, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03517847753788876, 0.0, 0.0, 0.0, 0.0, 0.0, 0.012565073058016527, 0.0, 0.013511533493035587, 0.0, 0.0, 0.0, 0.1160032767395455, 
0.3288776375406247, 0.009384967607632352, 0.013704851364696426, 0.0, 0.0, 0.0, 0.10679904950368906, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5396299966121684, 0.0, 0.0, 0.0, 0.4120885163537877, 0.0, 0.0, 0.0, 0.0, 0.1374333138690627, 0.0, 0.0, 0.0, 0.0, 0.0, 0.308835769631579, 0.0, 0.0, 0.2935183046180839, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21641442424094412, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16667427577804808, 0.0, 0.0, 0.0, 0.0, 0.1072149118509185, 0.0, 0.0, 0.13482790109225204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32702264908263307, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17999984836884408, 0.0, 0.0, 0.0, 0.0, 0.17960575635997003, 0.0, 0.0, 0.0, 0.0, 0.08691531027429766, 0.0, 0.2893303365574358, 0.0, 0.21730094701941294, 0.07940443022930238, 0.0, 0.0, 0.01060094130869171, 0.0, 0.0, 0.21897146914670435, 0.3193413479843402, 0.11502306512986679, 0.16078084781731777, 0.0, 0.0, 0.029138065953053547, 0.12899773650646262, 0.0, 0.0, 0.0, 0.0, 0.35209003333845906, 0.0, 0.0, 0.42881832168904727, 0.0, 0.07551665376676242, 0.0, 0.10828996414360535, 0.0, 0.0, 0.0, 0.06175478190339224, 0.07317612880730584, 0.0, 0.4573326969055594, 0.13616590004764864, 0.1733951270114995, 0.0, 0.0, 0.0, 0.0, 0.004682414366264115, 0.0, 0.0, 0.014457116882680743, 0.477538324169661, 0.6690522150262886, 0.0, 0.0, 0.35074798851029604, 0.0, 0.6171788496488323, 0.0, 0.0, 0.0, 0.4598571719367441, 0.020424541646230095, 0.5014147387061915, 0.0, 0.0, 0.18767693158032267, 0.0, 0.08703683411330847, 0.0, 0.3388800152965881, 0.004390191394809013, 0.2131890539109787, 0.0, 0.43631258167790793, 0.0, 0.0, 0.0, 0.0, 0.011243656021000366, 0.0, 0.0, 0.004302223899340062, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3252052607129126, 0.10943362605720088, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4600533876598886, 0.0, 0.0, 0.0, 0.6238654988124779, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17327928789102656, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3007942289792172, 0.49763260068933673, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1811653464990684, 0.0, 0.7596663692737102, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24115083978451807, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4132016125575898, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.30212649550849624, 0.45064666528381636, 0.0, 0.0, 1.0295831107021076, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4924133218118872, 0.0, 0.0, 0.6123145833841932, 1.0074416855456372, 0.8974627446983252, 0.0, 0.0, 0.39387754214687604, 0.6443411524082342, 0.0, 0.6700619016074552, 0.0, 0.0, 0.4377096051189058, 0.3977735630344448, 0.0, 0.0, 0.0, 0.054395363429349475, 0.46912699721752377, 0.09054692381782561, 0.0, 0.6320539215802606, 0.33393006705956935, 0.614294006786966, 0.0, 0.0, 0.3986581318369477, 0.630705395059908, 0.0, 0.4266820903955834, 0.6230830913144384, 0.0, 1.0831760350408295, 0.21088758697054613, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7135848094649876, 0.06891111903866448, 0.0, 0.0, 0.0, 0.32920907856114856, 0.5438621275845595, 0.0, 0.4139962279186794, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13809169802867077, 0.0, 0.14849341459532064, 0.0, 0.0, 0.0, 0.5533499952652239, 0.0, 0.10314194807289108, 0.15061800177708015, 0.0, 0.5514890096422472, 0.23190068111070972, 0.0, 0.701822721136001, 0.6294877235260824, 0.0, 0.0, 0.0, 1.1059687974440209, 0.0, 0.0, 0.0, 0.96169924491107, 0.0, 0.0, 0.0, 0.0, 0.06628312350990566, 0.0, 0.37743722057381907, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08765206247905213, 0.0, 0.5218343219200703, 0.0, 0.0, 0.0, 
0.41461579287108996, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6671900885778136, 0.0, 0.0, 0.0, 0.028262864260027967, 0.7909490968152504, 0.04511006432931489, 0.0, 0.0, 0.0, 0.4564818142302977, 0.35113439772465294, 0.0, 0.0, 0.0, 0.03776705986522304, 0.6129906961879193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42034814190539166, 0.0, 0.0, 0.21638060933821013, 0.0, 0.0, 0.0, 0.2526638526129318, 0.03715346614099295, 0.6358424325058142, 0.0, 0.0, 0.04152129953915335, 0.04614236094065058, 0.0, 0.0, 0.07015688762319212, 0.05740172594394938, 0.18351207696535793, 0.0, 0.0, 0.008864400691370657, 0.0, 0.8651249823989057, 0.407678395491645, 0.845679782901505, 0.0, 0.0, 0.0, 0.0, 0.4753420456230014, 1.6107190368773618, 0.0, 0.0, 0.0638102435424578, 0.24041718723542044, 0.5177820553003522, 0.44619181405158337, 0.4489609795607919, 0.0, 0.07301898938869086, 0.0902695074635508, 0.4268753686592877, 0.0, 0.0, 0.0, 0.16408594940133228, 0.0, 0.0, 0.6736439849461895, 0.031382854877976686, 0.034000045192845295, 0.0, 0.0, 0.8630676095443404, 0.3932053636261741, 0.0, 0.0, 0.0, 0.23747714078691665, 0.2179752640521007, 0.0, 0.059020425946533776, 0.23507519209688904, 0.0, 0.6184935143504988, 0.18475168643298573, 0.3255150538360457, 0.0, 0.0, 0.10559996237493433, 0.258855010743052, 0.0, 0.0, 0.0, 0.0, 0.5931453366617909, 0.0, 0.012879882724963071, 0.34456425817306313, 0.0, 0.10363371305008902, 0.0, 0.0, 0.13032437159832005, 0.0, 0.0, 0.9136517678658531, 0.11645026631438779, 0.0, 0.0, 1.006195673838974, 0.0, 0.49183662858528426, 0.5242175629103283, 0.0, 0.0, 0.5842253265103485, 0.09175087532086124, 0.0, 0.0, 0.6346229344595797, 0.8483440750859271, 0.0, 0.0, 0.43168828228976236, 1.1746392457903614, 0.9786548179849323, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22466432888463708, 0.0, 0.0, 0.23383689086971213, 0.07868759200794351, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5047067971651431, 0.0, 0.04466167239654772, 0.0, 0.18031282035336027, 0.06880056169195574, 0.0, 0.039760598424459205, 0.2052193360923158, 0.0, 0.1245954319549744, 0.0, 0.0, 0.3371869924989234, 0.10076818165067654, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21628428501390357, 0.41878760793751535, 0.0, 0.2743772579349106, 0.0, 0.0, 0.0, 0.23737336558608516, 0.0, 0.5609891075534331, 0.0, 0.0, 0.0, 0.0, 0.11206304892649907, 0.03770992438488979, 0.0, 0.0, 0.17383517173220803, 0.252655932334268, 0.0, 0.1585306005724301, 0.0, 0.1865298137691172, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05971061253533409, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15913149160221682, 0.0, 0.4030910369398146, 0.0, 0.32022434202130456, 0.23687245172165972, 0.0, 0.3599325265793679, 0.0, 0.4765964362382492, 0.0, 0.0, 0.0, 0.0, 0.6107120779202261, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.573260313114729, 0.5768180939544553, 0.0, 0.17499066339826644, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7201822598869417, 0.0, 0.0, 0.0, 0.0, 0.08725982492406978, 0.5051841445138487, 0.0, 0.0, 0.0, 0.2569811590076317, 0.3964028116744446, 0.0, 0.0, 0.0, 0.0, 0.0, 0.31198143094865527, 0.0, 0.0, 0.0, 0.0, 0.0, 0.42812611727229755, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.40187139064927635, 0.40436549376609193, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5533450612442088, 0.3541481069684905, 0.0, 0.0, 0.0, 0.1801509251180122, 0.0, 0.0, 0.0, 0.08899148634361333, 0.0, 0.4004935508153472, 0.1664002225353232, 0.6660561411338942, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5274372110974553, 0.0, 0.0, 0.11745018036413006, 0.03952272816910389, 0.654574176319042, 0.0, 0.0, 0.0, 0.0, 0.29642163335908794, 0.1274499525156547, 0.03345490004078419, 0.0, 0.0, 0.07653915037041417, 0.0, 
0.02978363268713748, 0.15372448023095572, 0.03967384765850083, 0.06258104057589436, 0.0, 0.0, 0.044337981986606795, 0.07548283043416787, 0.0, 0.0, 0.0, 0.04735429836324166, 0.0, 0.0, 0.10863396357319674, 0.1797235339241024, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2053113311622715, 0.0, 0.0, 0.6339058613302375, 0.26881844559242324, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.047384735026192754, 0.0, 0.09394657688969027, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.41199723535819494, 0.0, 0.0, 0.19249813639294353, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4930042077083581, 0.01569004651529754, 0.0, 0.1886409972802957, 0.0, 0.6504613027483894, 0.0, 0.0, 0.0, 0.4491308783325726, 0.0, 0.710600063326777, 0.0, 0.0, 0.0, 0.0, 0.6392481732081897, 0.0, 0.0, 0.21075790790635565, 0.143953089400151, 0.04517962123405222, 0.12446587761719602, 0.011602660433066613, 0.0, 0.021141064793252704, 0.017873704948355638, 0.0, 0.010329409925325224, 0.21730544865000528, 0.03874493610862431, 0.0, 0.0, 0.0, 0.04329986579681618, 0.02617857620219232, 0.0, 0.0, 0.14198488087669728, 0.04624555904799935, 0.33624630483183143, 0.0, 0.18065873234459212, 0.001167765010724762, 0.0, 0.41156050949339695, 0.06138103351330629, 0.0, 0.0, 0.0, 0.0, 0.0, 0.45223330011890606, 0.0, 0.42719481956226096, 0.0, 0.09390028378745956, 0.03159803909732349, 0.0, 0.0, 0.0, 0.14464053921535813, 0.0, 0.355579453624945, 0.27582314245244144, 0.0, 0.0, 0.4779189904559178, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6540512683692249, 0.16386729047093346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08595593354654336, 0.0868518036911852, 0.1436872279500763, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08976911428083846, 0.0, 0.0, 0.0, 0.06561633035108058, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0209499775034313, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06082234101182034, 0.19585779687130894, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01528743323594608, 0.0, 0.0, 0.0, 0.011174280569280867, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3922416239543676, 0.0, 0.5308327922857218, 0.38811106643298066, 0.0, 0.5861742359361938, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.010357877371534024, 0.03335404406511716, 0.2121541431315042, 0.0, 0.0, 0.0, 0.0, 0.0, 0.022181366862053645, 0.0, 0.0, 0.3232792127186002, 0.21780290259329063, 0.0, 0.0, 0.6724740217065875, 0.33454771410797746, 0.0, 0.0, 0.0, 0.0, 0.1517011050211833, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20421012621580428, 0.0, 0.0, 0.0, 0.09554663093280256, 0.0, 0.9066194809744413, 0.0, 0.6530356828494477, 0.0, 0.0, 0.4162310356447254, 0.2004622291406719, 0.0, 0.06531630990369706, 0.0, 0.0, 0.22516333873266767, 0.08751639468488193, 0.31273858097846424, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10565952264223451, 0.25213753900728575, 0.0, 0.13605306731422323, 0.0, 0.0, 0.20467409186458246, 0.0, 0.0, 0.0, 0.14062561725182204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06654804138060808, 0.04780210989364991, 0.3312858803709398, 0.0, 0.09056986573031543, 0.0, 0.0, 0.22425218794100035, 0.6978010919630194, 0.5644822411311934, 0.0, 0.0, 0.0, 0.0, 0.25245963204466193, 0.5714387205668741, 0.0, 0.7794778768245345, 0.5229243836029062, 0.6068223768633771, 0.0, 0.0, 0.3439063861094934, 0.0, 0.5825998753106167, 0.07738278703840655, 0.0, 0.0, 0.45088731313609587, 0.020026145653267647, 0.49371220470576765, 0.0, 0.1967497337446331, 0.9333687888834381, 0.2687116640308751, 0.3109501951809905, 0.0, 0.628995837333548, 0.6431918870734405, 0.0, 0.0, 0.4278019777568812, 0.0, 0.0, 
0.3426634565816645, 0.0, 0.7642079896603969, 0.15720215492969136, 0.0, 0.5627301734522071, 0.0, 0.0, 0.03992028432745955, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8902694418730629, 0.24785210085431258, 0.0, 0.0, 0.0, 0.19750384789873962, 0.0, 0.0, 0.24837057467209334, 0.0, 0.0, 0.06196917660673224, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9196979517908253, 0.47523150409994275, 0.0, 0.05084636736867684, 0.30281629383729847, 0.0, 0.0, 0.0, 0.0, 0.7797375130571516, 0.0, 0.0, 0.009193696508787878, 0.7164774465476489, 0.37223214885126943, 0.36077619715591613]), 42), 42)),)
+
diff --git a/test/test_parallel.jl b/test/test_parallel.jl
index 312d7826c..e33f8b2ab 100644
--- a/test/test_parallel.jl
+++ b/test/test_parallel.jl
@@ -28,6 +28,370 @@
         @test check_output("parallel/parallel_spmv.txt", String(take!(io)))
     end
+    let
+        io = IOBuffer()
+        A = fsprand(42, 42, 0.1)
+        B = fsprand(42, 42, 0.1)
+        CR = Tensor(Dense(Dense(Element(0.0))), zeros(42, 42))
+        @repl io @finch begin
+            CR .= 0
+            for i = _
+                for j = _
+                    for k = _
+                        CR[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+
+        AFormat = SparseList(Dense(Element(0.0)))
+        At = Tensor(AFormat, A)
+        BFormat = Dense(SparseList(Element(0.0)))
+        Bt = Tensor(BFormat, B)
+        Ct = Tensor(Dense(Dense(Element(0.0))), zeros(42, 42))
+        @repl io @finch_code begin
+            Ct .= 0
+            for i = parallel(_)
+                for j = _
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+        @repl io @finch begin
+            Ct .= 0
+            for i = parallel(_)
+                for j = _
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+
+        @test Ct == CR
+
+
+        @repl io @finch_code begin
+            Ct .= 0
+            for i = _
+                for j = parallel(_)
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+        @repl io @finch begin
+            Ct .= 0
+            for i = _
+                for j = parallel(_)
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+
+        @test Ct == CR
+
+        @repl io @finch_code begin
+            Ct .= 0
+            for j = parallel(_)
+                for i = _
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+        @repl io @finch begin
+            Ct .= 0
+            for j = parallel(_)
+                for i = _
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+
+        @test Ct == CR
+
+
+        @repl io @finch_code begin
+            Ct .= 0
+            for j = _
+                for i = parallel(_)
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+        @repl io @finch begin
+            Ct .= 0
+            for j = _
+                for i = parallel(_)
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+
+        @test Ct == CR
+
+        @repl io @finch_code begin
+            Ct .= 0
+            for j = parallel(_)
+                for i = parallel(_)
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+        @repl io @finch begin
+            Ct .= 0
+            for j = parallel(_)
+                for i = parallel(_)
+                    for k = _
+                        Ct[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+
+        @test Ct == CR
+
+#=
+        formats = [Dense, SparseList]
+        for fmatA1 in formats
+            for fmatA2 in formats
+                Af = fmatA2(fmatA1(Element(0.0)))
+                At = Tensor(Af, A)
+                for fmatB1 in formats
+                    for fmatB2 in formats
+                        Bf = fmatB2(fmatB1(Element(0.0)))
+                        Bt = Tensor(Bf, B)
+
+                        Ct = Tensor(Dense(Dense(Element(0.0))), zeros(42, 42))
+
+                        @repl io @finch_code begin
+                            Ct .= 0
+                            for i = parallel(_)
+                                for j = _
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+                        @repl io @finch begin
+                            Ct .= 0
+                            for i = parallel(_)
+                                for j = _
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+
+                        @test Ct == CR
+
+
+                        @repl io @finch_code begin
+                            Ct .= 0
+                            for i = _
+                                for j = parallel(_)
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+                        @repl io @finch begin
+                            Ct .= 0
+                            for i = _
+                                for j = parallel(_)
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+
+                        @test Ct == CR
+
+                        @repl io @finch_code begin
+                            Ct .= 0
+                            for j = parallel(_)
+                                for i = _
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+                        @repl io @finch begin
+                            Ct .= 0
+                            for j = parallel(_)
+                                for i = _
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+
+                        @test Ct == CR
+
+
+                        @repl io @finch_code begin
+                            Ct .= 0
+                            for j = _
+                                for i = parallel(_)
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+                        @repl io @finch begin
+                            Ct .= 0
+                            for j = _
+                                for i = parallel(_)
+                                    for k = _
+                                        Ct[i, j] += A[i, k] * B[k, j]
+                                    end
+                                end
+                            end
+                        end
+
+                        @test Ct == CR
+
+
+                    end
+                end
+            end
+        end =#
+        @test check_output("debug_parallel_spmms_no_atomics.txt", String(take!(io)))
+    end
+
+    let
+        io = IOBuffer()
+        A = fsprand(Int64, 42, 42, 0.9)
+        B = fsprand(Int64, 42, 42, 0.9)
+        CR = Tensor(Dense(Dense(Element(0))), zeros(42, 42))
+        @repl io @finch begin
+            CR .= 0
+            for i = _
+                for j = _
+                    for k = _
+                        CR[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+
+        AFormat = SparseList(Dense(Element(0)))
+        At = Tensor(AFormat, A)
+        BFormat = Dense(SparseList(Element(0)))
+        Bt = Tensor(BFormat, B)
+        Ct = Tensor(Dense(Dense(Atomic(Element(0)))), zeros(42, 42))
+        CBad = Tensor(Dense(Dense((Element(0)))), zeros(42, 42))
+
+#= @test_throws Finch.FinchConcurrencyError begin
+        @finch_code begin
+            Ct .= 0
+            for i = _
+                for j = _
+                    for k = parallel(_)
+                        CBad[i, j] += A[i, k] * B[k, j]
+                    end
+                end
+            end
+        end
+    end =#
+
+
+        # @repl io @finch_code begin
+        #     Ct .= 0
+        #     for i = _
+        #         for j = _
+        #             for k = parallel(_)
+        #                 Ct[i, j] += A[i, k] * B[k, j]
+        #             end
+        #         end
+        #     end
+        # end
+        # @repl io @finch begin
+        #     Ct .= 0
+        #     for i = _
+        #         for j = _
+        #             for k = parallel(_)
+        #                 Ct[i, j] += A[i, k] * B[k, j]
+        #             end
+        #         end
+        #     end
+        # end
+
+        # @test Ct == CR
+
+        # @repl io @finch_code begin
+        #     Ct .= 0
+        #     for i = _
+        #         for k = parallel(_)
+        #             for j = _
+        #                 Ct[i, j] += A[i, k] * B[k, j]
+        #             end
+        #         end
+        #     end
+        # end
+        # @repl io @finch begin
+        #     Ct .= 0
+        #     for i = _
+        #         for k = parallel(_)
+        #             for j = _
+
+        #                 Ct[i, j] += A[i, k] * B[k, j]
+        #             end
+        #         end
+        #     end
+        # end
+
+        # @test Ct == CR
+
+        # @repl io @finch_code begin
+        #     Ct .= 0
+        #     for k = parallel(_)
+        #         for i = _
+        #             for j = _
+        #                 Ct[i, j] += A[i, k] * B[k, j]
+        #             end
+        #         end
+        #     end
+        # end
+        # @repl io @finch begin
+        #     Ct .= 0
+        #     for k = parallel(_)
+        #         for i = _
+        #             for j = _
+
+        #                 Ct[i, j] += A[i, k] * B[k, j]
+        #             end
+        #         end
+        #     end
+        # end
+
+        # @test Ct == CR
+
+        @test check_output("debug_parallel_spmms_atomics.txt", String(take!(io)))
+    end
+
     let
         io = IOBuffer()
         A = Tensor(Dense(SparseList(Element(0.0))), [1 2; 3 4])
@@ -212,4 +576,29 @@
             end
         end)
     end
+    let
+        io = IOBuffer()
+        y = Tensor(Dense(Atomic(Element(0.0))))
+        A = Tensor(Dense(SparseList(Element(0.0))))
+        x = Tensor(Dense(Element(0.0)))
+        diag = Tensor(Dense(Element(0.0)))
+        y_j = Scalar(0.0)
+        @repl io @finch_code begin
+            y .= 0
+            for j = parallel(_)
+                let x_j = x[j]
+                    y_j .= 0
+                    for i = _
+                        let A_ij = A[i, j]
+                            y[i] += x_j * A_ij
+                            y_j[] += A_ij * x[i]
+                        end
+                    end
+                    y[j] += y_j[] + diag[j] * x_j
+                end
+            end
+        end
+        @test check_output("atomics_sym_spmv.txt", String(take!(io)))
+
+    end
 end
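Reviewer note (outside the diff): the no-atomics tests above all parallelize an output index (i or j). That is safe with a plain Element output because each thread then owns a disjoint set of rows or columns of Ct, so no two threads ever write the same entry; only the reduction index k is shared. A minimal, self-contained sketch of that pattern, using only the Finch.jl API already exercised in this diff (fsprand, Tensor, Dense, Element, @finch, parallel) — run Julia with multiple threads (e.g. julia -t 4) to get a threaded schedule:

using Finch

# Sparse inputs and dense outputs, mirroring the tests above.
A = fsprand(42, 42, 0.1)
B = fsprand(42, 42, 0.1)
CR = Tensor(Dense(Dense(Element(0.0))), zeros(42, 42))
Ct = Tensor(Dense(Dense(Element(0.0))), zeros(42, 42))

# Serial reference.
@finch begin
    CR .= 0
    for j = _
        for i = _
            for k = _
                CR[i, j] += A[i, k] * B[k, j]
            end
        end
    end
end

# Parallel over the output column index j: each thread owns a disjoint
# range of columns of Ct, so no atomics are required on the output.
@finch begin
    Ct .= 0
    for j = parallel(_)
        for i = _
            for k = _
                Ct[i, j] += A[i, k] * B[k, j]
            end
        end
    end
end

@assert Ct == CR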
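A second note: parallelizing the reduction index k is the case the Atomic level targets. There, several threads accumulate into the same Ct[i, j], so a plain Element output should be rejected (the commented-out @test_throws above expects Finch.FinchConcurrencyError for CBad), and the output's leaf must be wrapped in Atomic. The sketch below shows the shape those still-disabled tests would take; since the diff leaves them commented out, treat this as the intended usage rather than a currently verified one:

using Finch

A = fsprand(Int64, 42, 42, 0.9)
B = fsprand(Int64, 42, 42, 0.9)

# Output with an atomic leaf: concurrent += on the same entry is safe.
Ct = Tensor(Dense(Dense(Atomic(Element(0)))), zeros(42, 42))

@finch begin
    Ct .= 0
    for k = parallel(_)   # parallel reduction: threads share output entries
        for i = _
            for j = _
                Ct[i, j] += A[i, k] * B[k, j]
            end
        end
    end
end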
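Finally, on the atomics_sym_spmv kernel: the j loop is parallel and each iteration scatters into y[i] for every stored row i of column j, so distinct threads can hit the same entry of y — hence the Atomic leaf on y — while y_j is a per-iteration Scalar accumulator that needs no protection. The test only snapshots the generated code with @finch_code; the following is a hedged, runnable driver under the assumption that A holds the strict lower triangle of a symmetric matrix and diag its diagonal. The names S, xv, and dg are illustrative (dg also avoids shadowing LinearAlgebra.diag, which the test's local diag tensor would), and the Tensor-from-Array constructors are assumed to behave as in the tests above:

using Finch, LinearAlgebra

n = 42
S = let R = rand(n, n)
    R + R'                                # dense symmetric reference
end
xv = rand(n)

A = Tensor(Dense(SparseList(Element(0.0))), tril(S, -1))  # strict lower triangle
dg = Tensor(Dense(Element(0.0)), diag(S))                 # diagonal kept separately
x = Tensor(Dense(Element(0.0)), xv)
y = Tensor(Dense(Atomic(Element(0.0))), zeros(n))         # atomic: rows are shared
y_j = Scalar(0.0)

@finch begin
    y .= 0
    for j = parallel(_)
        let x_j = x[j]
            y_j .= 0
            for i = _
                let A_ij = A[i, j]
                    y[i] += x_j * A_ij     # scatter across rows: needs Atomic
                    y_j[] += A_ij * x[i]   # thread-private accumulator
                end
            end
            y[j] += y_j[] + dg[j] * x_j
        end
    end
end

# Expected (up to roundoff): y ≈ S * xv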