10 changes: 9 additions & 1 deletion Project.toml
@@ -18,18 +18,26 @@ SparseArraysExt = "SparseArrays"
StatisticsExt = "Statistics"

[compat]
Aqua = "0.8.12"
Distributed = "<0.0.1, 1"
ExplicitImports = "1.13.2"
LinearAlgebra = "<0.0.1, 1"
Primes = "0.4, 0.5"
Random = "<0.0.1, 1"
Serialization = "<0.0.1, 1"
SparseArrays = "<0.0.1, 1"
SpecialFunctions = "0.8, 1, 2"
Statistics = "<0.0.1, 1"
Test = "<0.0.1, 1"
julia = "1.10"

[extras]
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
ExplicitImports = "7d51a73a-1435-4ff3-83d9-f097790105c7"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["ExplicitImports", "SparseArrays", "SpecialFunctions", "Statistics", "Test"]
test = ["Aqua", "ExplicitImports", "SparseArrays", "SpecialFunctions", "Statistics", "Test"]
19 changes: 18 additions & 1 deletion ext/SparseArraysExt.jl
@@ -1,6 +1,6 @@
module SparseArraysExt

using DistributedArrays: DArray, localpart
using DistributedArrays: DArray, SubDArray, SubOrDArray, localpart
using DistributedArrays.Distributed: remotecall_fetch
using SparseArrays: SparseArrays, nnz

@@ -11,4 +11,21 @@ function SparseArrays.nnz(A::DArray)
return reduce(+, B)
end

# Fix method ambiguities
# TODO: Improve efficiency?
Base.copyto!(dest::SubOrDArray{<:Any,2}, src::SparseArrays.AbstractSparseMatrixCSC) = copyto!(dest, Matrix(src))
@static if isdefined(SparseArrays, :CHOLMOD)
Base.copyto!(dest::SubOrDArray, src::SparseArrays.CHOLMOD.Dense) = copyto!(dest, Array(src))
Base.copyto!(dest::SubOrDArray{T}, src::SparseArrays.CHOLMOD.Dense{T}) where {T<:Union{Float32,Float64,ComplexF32,ComplexF64}} = copyto!(dest, Array(src))
Base.copyto!(dest::SubOrDArray{T,2}, src::SparseArrays.CHOLMOD.Dense{T}) where {T<:Union{Float32,Float64,ComplexF32,ComplexF64}} = copyto!(dest, Array(src))
end

# Fix method ambiguities
for T in (:DArray, :SubDArray)
@eval begin
Base.:(==)(d1::$T{<:Any,1}, d2::SparseArrays.ReadOnly) = d1 == parent(d2)
Base.:(==)(d1::SparseArrays.ReadOnly, d2::$T{<:Any,1}) = parent(d1) == d2
end
end

end
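A rough usage sketch of what the `copyto!` ambiguity fixes above enable (not part of the diff; assumes two worker processes, and note that the sparse source is densified first, hence the TODO about efficiency):

```julia
using Distributed
addprocs(2)
@everywhere using DistributedArrays, SparseArrays

S = sprand(8, 8, 0.3)   # sparse source
D = dzeros(8, 8)        # distributed destination
copyto!(D, S)           # now dispatches unambiguously to the SubOrDArray method added above
D == S                  # true
```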
2 changes: 1 addition & 1 deletion src/broadcast.jl
@@ -60,7 +60,7 @@ end
# - Q: How do decide on the cuts
# - then localise arguments on each node
##
@inline function Base.copyto!(dest::DDestArray, bc::Broadcasted)
@inline function Base.copyto!(dest::DDestArray, bc::Broadcasted{Nothing})
axes(dest) == axes(bc) || Broadcast.throwdm(axes(dest), axes(bc))

# Distribute Broadcasted
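For context: `Broadcasted{Nothing}` is the style-stripped form that Base's broadcasting machinery hands to the final `copyto!`, so narrowing the signature keeps the distributed path while sidestepping ambiguities with Base's own `copyto!` methods for `Broadcasted`. A minimal exercise of that path (a sketch, assuming two workers):

```julia
using Distributed
addprocs(2)
@everywhere using DistributedArrays

d = dones(4, 4)
d .= 2 .* d .+ 1   # in-place broadcast; materializes via copyto!(d, ::Broadcasted{Nothing})
```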
35 changes: 27 additions & 8 deletions src/darray.jl
@@ -368,7 +368,9 @@ Can return a view into `localpart(A)`
end

# shortcut to set/get localparts of a distributed object
function Base.getindex(d::DArray, s::Symbol)
Base.getindex(d::DArray, s::Symbol) = _getindex(d, s)
Base.getindex(d::DArray{<:Any, 1}, s::Symbol) = _getindex(d, s)
function _getindex(d::DArray, s::Symbol)
@assert s in [:L, :l, :LP, :lp]
return localpart(d)
end
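Usage of the `:L` shortcut is unchanged; splitting the implementation into `_getindex` lets a dimension-specific method be added for distributed vectors without changing behaviour. A quick sketch (not part of the diff; assumes two workers):

```julia
using Distributed
addprocs(2)
@everywhere using DistributedArrays

d = drand(4, 4)
remotecall_fetch(first(workers())) do
    d[:L] == localpart(d)   # :L / :l / :LP / :lp all return the local chunk
end
```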
@@ -497,10 +499,28 @@ dfill(v, d1::Integer, drest::Integer...) = dfill(v, convert(Dims, tuple(d1, dres
Construct a distributed uniform random array.
Trailing arguments are the same as those accepted by `DArray`.
"""
drand(r, dims::Dims, args...) = DArray(I -> rand(r, map(length,I)), dims, args...)
drand(r, d1::Integer, drest::Integer...) = drand(r, convert(Dims, tuple(d1, drest...)))
drand(d1::Integer, drest::Integer...) = drand(Float64, convert(Dims, tuple(d1, drest...)))
drand(d::Dims, args...) = drand(Float64, d, args...)
drand(::Type{T}, dims::Dims) where {T} = DArray(I -> rand(T, map(length, I)), dims)
drand(X, dims::Dims) = DArray(I -> rand(X, map(length, I)), dims)
drand(dims::Dims) = drand(Float64, dims)

drand(::Type{T}, d1::Integer, drest::Integer...) where {T} = drand(T, Dims((d1, drest...)))
drand(X, d1::Integer, drest::Integer...) = drand(X, Dims((d1, drest...)))
drand(d1::Integer, drest::Integer...) = drand(Float64, Dims((d1, drest...)))

# With optional process IDs and number of chunks
for N in (1, 2)
@eval begin
drand(::Type{T}, dims::Dims, args::Vararg{Any,$N}) where {T} = DArray(I -> rand(T, map(length, I)), dims, args...)
drand(X, dims::Dims, args::Vararg{Any,$N}) = DArray(I -> rand(X, map(length, I)), dims, args...)
drand(dims::Dims, args::Vararg{Any,$N}) = drand(Float64, dims, args...)
end
end

# Fix method ambiguities
drand(dims::Dims, procs::Tuple{Vararg{Int}}) = drand(Float64, dims, procs)
drand(dims::Dims, procs::Tuple{Vararg{Int}}, dist) = drand(Float64, dims, procs, dist)
drand(X::Tuple{Vararg{Int}}, dim::Integer) = drand(X, Dims((dim,)))
drand(X::Tuple{Vararg{Int}}, d1::Integer, d2::Integer) = drand(X, Dims((d1, d2)))
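The supported call forms are the same as before; replacing the catch-all `args...` definitions with explicit `Vararg{Any,N}` methods (plus the pinned-down ambiguity fixes) just makes dispatch unambiguous. For illustration (a sketch; assumes two workers):

```julia
using Distributed
addprocs(2)
@everywhere using DistributedArrays

drand(4, 4)                        # Float64 elements, default layout
drand(Int, (4, 4))                 # rand-style first argument (eltype or collection)
drand((4, 4), workers())           # restrict to the given processes
drand((4, 4), workers(), [1, 2])   # ... with an explicit chunks-per-dimension layout
```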

"""
drandn(dims, ...)
@@ -620,7 +640,7 @@ allowscalar(flag = true) = (_allowscalar[] = flag)
_scalarindexingallowed() = _allowscalar[] || throw(ErrorException("scalar indexing disabled"))

getlocalindex(d::DArray, idx...) = localpart(d)[idx...]
function getindex_tuple(d::DArray{T}, I::Tuple{Vararg{Int}}) where T
function getindex_tuple(d::DArray{T,N}, I::NTuple{N,Int}) where {T,N}
chidx = locate(d, I...)
idxs = d.indices[chidx...]
localidx = ntuple(i -> (I[i] - first(idxs[i]) + 1), ndims(d))
@@ -632,11 +652,10 @@ function Base.getindex(d::DArray, i::Int)
_scalarindexingallowed()
return getindex_tuple(d, Tuple(CartesianIndices(d)[i]))
end
function Base.getindex(d::DArray, i::Int...)
function Base.getindex(d::DArray{<:Any,N}, i::Vararg{Int,N}) where {N}
_scalarindexingallowed()
return getindex_tuple(d, i)
end

Base.getindex(d::DArray) = d[1]
Base.getindex(d::SubDArray, I::Int...) = invoke(getindex, Tuple{SubArray{<:Any,N},Vararg{Int,N}} where N, d, I...)
Base.getindex(d::SubOrDArray, I::Union{Int,UnitRange{Int},Colon,Vector{Int},StepRange{Int,Int}}...) = view(d, I...)
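The tightened scalar-indexing signatures above (`NTuple{N,Int}` in `getindex_tuple`, `Vararg{Int,N}` in `getindex`) only reject index counts that don't match `ndims(d)`; well-formed scalar reads behave as before. A small sketch (scalar indexing is gated by `allowscalar`):

```julia
using Distributed
addprocs(2)
@everywhere using DistributedArrays

d = drand(4, 4)
DistributedArrays.allowscalar(true)   # permit scalar getindex on this process
d[1, 2]   # cartesian index: getindex(d, ::Vararg{Int,2})
d[3]      # linear index:    getindex(d, ::Int)
```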
8 changes: 7 additions & 1 deletion src/mapreduce.jl
@@ -11,7 +11,13 @@ function Base.map!(f::F, dest::DArray, src::DArray{<:Any,<:Any,A}) where {F,A}
return dest
end

function Base.reduce(f, d::DArray)
# Only defining `reduce(f, ::DArray)` causes method ambiguity issues with
# - `reduce(hcat, ::AbstractVector{<:AbstractVecOrMat})`
# - `reduce(vcat, ::AbstractVector{<:AbstractVecOrMat})`
Base.reduce(f, d::DArray) = _reduce(f, d)
Base.reduce(::typeof(hcat), d::DArray{<:AbstractVecOrMat, 1}) = _reduce(hcat, d)
Base.reduce(::typeof(vcat), d::DArray{<:AbstractVecOrMat, 1}) = _reduce(vcat, d)
function _reduce(f, d::DArray)
results = asyncmap(procs(d)) do p
remotecall_fetch(p) do
return reduce(f, localpart(d))
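A usage sketch of the dedicated `hcat`/`vcat` methods (not part of the diff; assumes two workers): reducing a distributed vector of arrays now dispatches cleanly instead of colliding with Base's `reduce(hcat, ...)` / `reduce(vcat, ...)` specializations.

```julia
using Distributed
addprocs(2)
@everywhere using DistributedArrays

d = distribute([fill(i, 2) for i in 1:4])   # DArray of small vectors
reduce(vcat, d)                             # 8-element Vector{Int}
reduce(hcat, d)                             # 2×4 Matrix{Int}
```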
4 changes: 2 additions & 2 deletions src/spmd.jl
@@ -43,7 +43,7 @@ struct WorkerChannelDict
end
const WORKERCHANNELS = WorkerChannelDict()

Base.get!(f, x::WorkerChannelDict, id::Int) = @lock x.lock get!(f, x.data, id)
Base.get!(f::Function, x::WorkerChannelDict, id::Int) = @lock x.lock get!(f, x.data, id)

# mapping between a context id and context object
struct SPMDContextDict
@@ -54,7 +54,7 @@ end
const CONTEXTS = SPMDContextDict()

Base.delete!(x::SPMDContextDict, id::Tuple{Int,Int}) = @lock x.lock delete!(x.data, id)
Base.get!(f, x::SPMDContextDict, id::Tuple{Int,Int}) = @lock x.lock get!(f, x.data, id)
Base.get!(f::Function, x::SPMDContextDict, id::Tuple{Int,Int}) = @lock x.lock get!(f, x.data, id)

function context_local_storage()
ctxt = get_ctxt_from_id(task_local_storage(:SPMD_CTXT))
6 changes: 6 additions & 0 deletions test/aqua.jl
@@ -0,0 +1,6 @@
using DistributedArrays, Test
import Aqua

@testset "Aqua" begin
Aqua.test_all(DistributedArrays; ambiguities = (; broken = true))
end
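Roughly, the grouped call corresponds to running Aqua's individual checks, with only the ambiguity check marked as known-broken (a sketch assuming Aqua 0.8's per-check API):

```julia
using DistributedArrays
import Aqua

Aqua.test_ambiguities(DistributedArrays; broken = true)   # remaining ambiguities are tracked, not failed
Aqua.test_unbound_args(DistributedArrays)
Aqua.test_undefined_exports(DistributedArrays)
Aqua.test_piracies(DistributedArrays)
Aqua.test_stale_deps(DistributedArrays)
Aqua.test_deps_compat(DistributedArrays)
```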
1 change: 1 addition & 0 deletions test/runtests.jl
@@ -36,6 +36,7 @@ function check_leaks()
end
end

include("aqua.jl")
include("explicit_imports.jl")
include("darray.jl")
include("spmd.jl")