Package evaluation to test LinearAlgebraMPI on Julia 1.12.4 (422f456051*) started at 2026-01-29T05:40:52.867

################################################################################
# Set-up
#

Installing PkgEval dependencies (TestEnv)...
  Activating project at `~/.julia/environments/v1.12`
Set-up completed after 8.38s

################################################################################
# Installation
#

Installing LinearAlgebraMPI...
   Resolving package versions...
    Updating `~/.julia/environments/v1.12/Project.toml`
  [5bdd2be4] + LinearAlgebraMPI v0.1.9
    Updating `~/.julia/environments/v1.12/Manifest.toml`
  [79e6a3ab] + Adapt v4.4.0
  [a9b6321e] + Atomix v1.1.2
  [8f478455] + Blake3Hash v0.3.0
  [ffbed154] + DocStringExtensions v0.9.5
  [692b3bcd] + JLLWrappers v1.7.1
  [63c18a36] + KernelAbstractions v0.9.39
  [5bdd2be4] + LinearAlgebraMPI v0.1.9
  [da04e1cc] + MPI v0.20.23
  [3da0fdf6] + MPIPreferences v0.1.11
  [55d2b088] + MUMPS v1.6.0
  [1914dd2f] + MacroTools v0.5.16
  [eebad327] + PkgVersion v0.3.3
  [aea7be01] + PrecompileTools v1.3.3
  [21216c6a] + Preferences v1.5.1
  [ae029012] + Requires v1.3.1
  [fdea26ae] + SIMD v3.7.2
  [90137ffa] + StaticArrays v1.9.16
  [1e83bf80] + StaticArraysCore v1.4.4
  [013be700] + UnsafeAtomics v0.3.0
  [6e34b625] + Bzip2_jll v1.0.9+0
  [e33a78d0] + Hwloc_jll v2.12.2+0
  [94ce4f54] + Libiconv_jll v1.18.0+0
  [d00139f3] + METIS_jll v5.1.3+0
  [7cb0a576] + MPICH_jll v4.3.2+0
  [f1f71cc9] + MPItrampoline_jll v5.5.4+0
  [ca64183c] + MUMPS_jll v5.8.2+0
  [9237b28f] + MicrosoftMPI_jll v10.1.4+3
  [656ef2d0] + OpenBLAS32_jll v0.3.30+0
  [fe0851c0] + OpenMPI_jll v5.0.9+0
  [b247a4be] + PARMETIS_jll v4.0.6+2
  [aabda75e] + SCALAPACK32_jll v2.2.2+0
  [a8d0f55d] + SCOTCH_jll v7.0.7+0
⌅ [02c8fc9c] + XML2_jll v2.13.9+0
  [ffd25f8a] + XZ_jll v5.8.2+0
  [a65dc6b1] + Xorg_libpciaccess_jll v0.18.1+0
  [0dad84c5] + ArgTools v1.1.2
  [56f22d72] + Artifacts v1.11.0
  [2a0f44e3] + Base64 v1.11.0
  [ade2ca70] + Dates v1.11.0
  [8ba89e20] + Distributed v1.11.0
  [f43a241f] + Downloads v1.7.0
  [7b1f6079] + FileWatching v1.11.0
  [b77e0a4c] + InteractiveUtils v1.11.0
  [ac6e5ff7] + JuliaSyntaxHighlighting v1.12.0
  [4af54fe1] + LazyArtifacts v1.11.0
  [b27032c2] + LibCURL v0.6.4
  [76f85450] + LibGit2 v1.11.0
  [8f399da3] + Libdl v1.11.0
  [37e2e46d] + LinearAlgebra v1.12.0
  [56ddb016] + Logging v1.11.0
  [d6f4376e] + Markdown v1.11.0
  [ca575930] + NetworkOptions v1.3.0
  [44cfe95a] + Pkg v1.12.1
  [de0858da] + Printf v1.11.0
  [9a3f8284] + Random v1.11.0
  [ea8e919c] + SHA v0.7.0
  [9e88b42a] + Serialization v1.11.0
  [6462fe0b] + Sockets v1.11.0
  [2f01184e] + SparseArrays v1.12.0
  [f489334b] + StyledStrings v1.11.0
  [fa267f1f] + TOML v1.0.3
  [a4e569a6] + Tar v1.10.0
  [cf7118a7] + UUIDs v1.11.0
  [4ec0a83e] + Unicode v1.11.0
  [e66e0078] + CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] + LibCURL_jll v8.15.0+0
  [e37daf67] + LibGit2_jll v1.9.0+0
  [29816b5a] + LibSSH2_jll v1.11.3+1
  [14a3606d] + MozillaCACerts_jll v2025.11.4
  [4536629a] + OpenBLAS_jll v0.3.29+0
  [458c3c95] + OpenSSL_jll v3.5.4+0
  [bea87d4a] + SuiteSparse_jll v7.8.3+2
  [83775a58] + Zlib_jll v1.3.1+2
  [8e850b90] + libblastrampoline_jll v5.15.0+0
  [8e850ede] + nghttp2_jll v1.64.0+1
  [3f19e933] + p7zip_jll v17.7.0+0
        Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading. To see why use `status --outdated -m`
Installation completed after 11.67s

################################################################################
# Precompilation
#

Precompiling PkgEval dependencies...
Precompiling package dependencies...
Precompiling packages...
   6303.4 ms  ✓ NCCL_jll
              ✗ LinearAlgebraMPI
 354515.8 ms  ✓ MPI → CUDAExt
  64427.7 ms  ✓ NCCL
              ✗ LinearAlgebraMPI → LinearAlgebraMPICUDAExt
  3 dependencies successfully precompiled in 568 seconds. 130 already precompiled.

ERROR: LoadError: The following 2 direct dependencies failed to precompile:

LinearAlgebraMPI

Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.12/LinearAlgebraMPI/jl_ysJqx6".
ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
The function `convert` exists, but no method is defined for this combination of argument types.

Closest candidates are:
  MUMPS.MUMPS_JOB(::Integer)
   @ MUMPS Enums.jl:211
  convert(::Type{T}, !Matched::T) where T
   @ Base Base_compiler.jl:133

Stacktrace:
  [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32)
    @ Base ./Base_compiler.jl:57
  [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328
  [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419
  [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64})
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484
  [5] macro expansion
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined]
  [6] macro expansion
    @ ~/.julia/packages/PrecompileTools/gn08A/src/workloads.jl:73 [inlined]
  [7] macro expansion
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined]
  [8] macro expansion
    @ ~/.julia/packages/PrecompileTools/gn08A/src/workloads.jl:121 [inlined]
  [9] top-level scope
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308
 [10] include(mod::Module, _path::String)
    @ Base ./Base.jl:306
 [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)
    @ Base ./loading.jl:3024
 [12] top-level scope
    @ stdin:5
 [13] eval(m::Module, e::Any)
    @ Core ./boot.jl:489
 [14] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)
    @ Base ./loading.jl:2870
 [15] include_string
    @ ./loading.jl:2880 [inlined]
 [16] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:315
 [17] _start()
    @ Base ./client.jl:550
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
in expression starting at stdin:5

LinearAlgebraMPI → LinearAlgebraMPICUDAExt

Failed to precompile LinearAlgebraMPICUDAExt [386e412e-22e3-592f-a311-c0c588906e07] to "/home/pkgeval/.julia/compiled/v1.12/LinearAlgebraMPICUDAExt/jl_rbGxSS".
ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
The function `convert` exists, but no method is defined for this combination of argument types.

Closest candidates are:
  MUMPS.MUMPS_JOB(::Integer)
   @ MUMPS Enums.jl:211
  convert(::Type{T}, !Matched::T) where T
   @ Base Base_compiler.jl:133

Stacktrace:
  [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32)
    @ Base ./Base_compiler.jl:57
  [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328
  [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419
  [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64})
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484
  [5] macro expansion
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined]
  [6] macro expansion
    @ ~/.julia/packages/PrecompileTools/gn08A/src/workloads.jl:73 [inlined]
  [7] macro expansion
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined]
  [8] macro expansion
    @ ~/.julia/packages/PrecompileTools/gn08A/src/workloads.jl:121 [inlined]
  [9] top-level scope
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308
 [10] include(mod::Module, _path::String)
    @ Base ./Base.jl:306
 [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String)
    @ Base ./loading.jl:3024
 [12] top-level scope
    @ stdin:5
 [13] eval(m::Module, e::Any)
    @ Core ./boot.jl:489
 [14] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)
    @ Base ./loading.jl:2870
 [15] include_string
    @ ./loading.jl:2880 [inlined]
 [16] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:315
 [17] _start()
    @ Base ./client.jl:550
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.12/LinearAlgebraMPI/jl_DtH54y".
Stacktrace:
  [1] error(s::String)
    @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing)
    @ Base ./loading.jl:3311
  [3] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId, String, Dict{String, Int64}})()
    @ Base ./loading.jl:2679
  [4] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId, String, Dict{String, Int64}}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool})
    @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.12/FileWatching/src/pidfile.jl:93
  [5] #mkpidlock#7
    @ /opt/julia/share/julia/stdlib/v1.12/FileWatching/src/pidfile.jl:88 [inlined]
  [6] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64})
    @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.12/FileWatching/src/pidfile.jl:114
  [7] #invokelatest_gr#232
    @ ./reflection.jl:1297 [inlined]
  [8] invokelatest_gr
    @ ./reflection.jl:1289 [inlined]
  [9] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId, String, Dict{String, Int64}}, pkg::Base.PkgId, srcpath::String; stale_age::Int64)
    @ Base ./loading.jl:3882
 [10] maybe_cachefile_lock
    @ ./loading.jl:3879 [inlined]
 [11] __require_prelocked(pkg::Base.PkgId, env::String)
    @ Base ./loading.jl:2665
 [12] _require_prelocked(uuidkey::Base.PkgId, env::String)
    @ Base ./loading.jl:2493
 [13] macro expansion
    @ ./loading.jl:2421 [inlined]
 [14] macro expansion
    @ ./lock.jl:376 [inlined]
 [15] __require(into::Module, mod::Symbol)
    @ Base ./loading.jl:2386
 [16] require(into::Module, mod::Symbol)
    @ Base ./loading.jl:2362
 [17] top-level scope
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/ext/LinearAlgebraMPICUDAExt.jl:13
 [18] include(mod::Module, _path::String)
    @ Base ./Base.jl:306
 [19] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)
    @ Base ./loading.jl:3024
 [20] top-level scope
    @ stdin:5
 [21] eval(m::Module, e::Any)
    @ Core ./boot.jl:489
 [22] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)
    @ Base ./loading.jl:2870
 [23] include_string
    @ ./loading.jl:2880 [inlined]
 [24] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:315
 [25] _start()
    @ Base ./client.jl:550
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/ext/LinearAlgebraMPICUDAExt.jl:1
in expression starting at stdin:5
in expression starting at /PkgEval.jl/scripts/precompile.jl:34
Precompilation failed after 614.18s
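The root cause above is an enum-conversion mismatch. MUMPS.jl v1.6.0 declares a field of `Mumps` (its MUMPS job code) with the `@enum` type `MUMPS_JOB`; LinearAlgebraMPI's precompile workload, via the `ldlt` call at mumps_factorization.jl:328, assigns a raw `Int32` to that field, and Julia's default `setproperty!` then needs `convert(MUMPS_JOB, ::Int32)`, which `@enum` types do not define. They only define a constructor, which is exactly the "closest candidate" the error lists. Below is a minimal sketch of the failing pattern and two possible fixes; the struct, field name, and enum values are hypothetical stand-ins, not MUMPS.jl's real definitions (actual MUMPS job codes include -1 for initialization and 1 for analysis).

    # Stand-in types that reproduce the mechanism of the MethodError above.
    @enum DemoJob begin
        DEMO_INIT    = -1
        DEMO_ANALYZE = 1
    end

    mutable struct DemoMumps
        job::DemoJob    # field with a declared enum type, as in MUMPS.Mumps
    end

    m = DemoMumps(DEMO_INIT)
    # m.job = Int32(1)          # MethodError: setproperty! calls
    #                           # convert(DemoJob, Int32(1)), which has no method
    m.job = DemoJob(Int32(1))   # fix 1: call the @enum constructor explicitly

    # fix 2: opt in to implicit conversion so integer assignments keep working
    Base.convert(::Type{DemoJob}, x::Integer) = DemoJob(x)
    m.job = Int32(-1)           # now routed through setproperty! -> convert

Either change (wrapping assignments in `MUMPS.MUMPS_JOB(...)` on the LinearAlgebraMPI side, or defining the `convert` method on the MUMPS.jl side) would remove this class of failure; which one is appropriate depends on which package owns the assignment.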
################################################################################
# Testing
#

Testing LinearAlgebraMPI
Status `/tmp/jl_Xd6wtu/Project.toml`
  [79e6a3ab] Adapt v4.4.0
  [8f478455] Blake3Hash v0.3.0
  [052768ef] CUDA v5.9.6
  [63c18a36] KernelAbstractions v0.9.39
  [5bdd2be4] LinearAlgebraMPI v0.1.9
  [da04e1cc] MPI v0.20.23
  [3da0fdf6] MPIPreferences v0.1.11
  [55d2b088] MUMPS v1.6.0
  [3fe64909] NCCL v0.1.2
  [aea7be01] PrecompileTools v1.3.3
  [90137ffa] StaticArrays v1.9.16
  [4889d778] CUDSS_jll v0.7.1+0
  [37e2e46d] LinearAlgebra v1.12.0
  [9a3f8284] Random v1.11.0
  [2f01184e] SparseArrays v1.12.0
  [8dfed614] Test v1.11.0
Status `/tmp/jl_Xd6wtu/Manifest.toml`
  [621f4979] AbstractFFTs v1.5.0
  [79e6a3ab] Adapt v4.4.0
  [a9b6321e] Atomix v1.1.2
  [ab4f0b2a] BFloat16s v0.6.1
  [8f478455] Blake3Hash v0.3.0
  [fa961155] CEnum v0.5.0
  [052768ef] CUDA v5.9.6
  [1af6417a] CUDA_Runtime_Discovery v1.0.0
  [34da2185] Compat v4.18.1
  [a8cc5b0e] Crayons v4.1.1
  [9a962f9c] DataAPI v1.16.0
  [a93c6f00] DataFrames v1.8.1
  [864edb3b] DataStructures v0.19.3
  [e2d170a0] DataValueInterfaces v1.0.0
  [ffbed154] DocStringExtensions v0.9.5
  [e2ba6199] ExprTools v0.1.10
  [0c68f7d7] GPUArrays v11.3.4
  [46192b85] GPUArraysCore v0.2.0
  [61eb1bfa] GPUCompiler v1.8.2
  [096a3bc2] GPUToolbox v1.0.0
  [076d061b] HashArrayMappedTries v0.2.0
  [842dd82b] InlineStrings v1.4.5
  [41ab1584] InvertedIndices v1.3.1
  [82899510] IteratorInterfaceExtensions v1.0.0
  [692b3bcd] JLLWrappers v1.7.1
  [63c18a36] KernelAbstractions v0.9.39
  [929cbde3] LLVM v9.4.4
  [8b046642] LLVMLoopInfo v1.0.0
  [b964fa9f] LaTeXStrings v1.4.0
  [5bdd2be4] LinearAlgebraMPI v0.1.9
  [da04e1cc] MPI v0.20.23
  [3da0fdf6] MPIPreferences v0.1.11
  [55d2b088] MUMPS v1.6.0
  [1914dd2f] MacroTools v0.5.16
  [e1d29d7a] Missings v1.2.0
  [3fe64909] NCCL v0.1.2
  [5da4648a] NVTX v1.0.3
  [bac558e1] OrderedCollections v1.8.1
  [eebad327] PkgVersion v0.3.3
  [2dfb63ee] PooledArrays v1.4.3
  [aea7be01] PrecompileTools v1.3.3
  [21216c6a] Preferences v1.5.1
  [08abe8d2] PrettyTables v3.1.2
  [74087812] Random123 v1.7.1
  [e6cf234a] RandomNumbers v1.6.0
  [189a3867] Reexport v1.2.2
  [ae029012] Requires v1.3.1
  [fdea26ae] SIMD v3.7.2
  [7e506255] ScopedValues v1.5.0
  [6c6a2e73] Scratch v1.3.0
  [91c51154] SentinelArrays v1.4.9
  [a2af1166] SortingAlgorithms v1.2.2
  [90137ffa] StaticArrays v1.9.16
  [1e83bf80] StaticArraysCore v1.4.4
  [10745b16] Statistics v1.11.1
  [892a3eda] StringManipulation v0.4.2
  [3783bdb8] TableTraits v1.0.1
  [bd369af6] Tables v1.12.1
  [e689c965] Tracy v0.1.6
  [013be700] UnsafeAtomics v0.3.0
  [6e34b625] Bzip2_jll v1.0.9+0
  [d1e2174e] CUDA_Compiler_jll v0.4.1+1
  [4ee394cb] CUDA_Driver_jll v13.1.0+2
⌅ [76a88914] CUDA_Runtime_jll v0.19.2+0
  [4889d778] CUDSS_jll v0.7.1+0
  [e33a78d0] Hwloc_jll v2.12.2+0
  [9c1d0b0a] JuliaNVTXCallbacks_jll v0.2.1+0
  [dad2f222] LLVMExtra_jll v0.0.38+0
  [ad6e5548] LibTracyClient_jll v0.13.1+0
  [94ce4f54] Libiconv_jll v1.18.0+0
  [d00139f3] METIS_jll v5.1.3+0
  [7cb0a576] MPICH_jll v4.3.2+0
  [f1f71cc9] MPItrampoline_jll v5.5.4+0
  [ca64183c] MUMPS_jll v5.8.2+0
  [9237b28f] MicrosoftMPI_jll v10.1.4+3
  [4d6d38e4] NCCL_jll v2.28.3+0
  [e98f9f5b] NVTX_jll v3.2.2+0
  [656ef2d0] OpenBLAS32_jll v0.3.30+0
  [fe0851c0] OpenMPI_jll v5.0.9+0
  [b247a4be] PARMETIS_jll v4.0.6+2
  [aabda75e] SCALAPACK32_jll v2.2.2+0
  [a8d0f55d] SCOTCH_jll v7.0.7+0
⌅ [02c8fc9c] XML2_jll v2.13.9+0
  [ffd25f8a] XZ_jll v5.8.2+0
  [a65dc6b1] Xorg_libpciaccess_jll v0.18.1+0
  [1e29f10c] demumble_jll v1.3.0+0
  [0dad84c5] ArgTools v1.1.2
  [56f22d72] Artifacts v1.11.0
  [2a0f44e3] Base64 v1.11.0
  [ade2ca70] Dates v1.11.0
  [8ba89e20] Distributed v1.11.0
  [f43a241f] Downloads v1.7.0
  [7b1f6079] FileWatching v1.11.0
  [9fa8497b] Future v1.11.0
  [b77e0a4c] InteractiveUtils v1.11.0
  [ac6e5ff7] JuliaSyntaxHighlighting v1.12.0
  [4af54fe1] LazyArtifacts v1.11.0
  [b27032c2] LibCURL v0.6.4
  [76f85450] LibGit2 v1.11.0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.12.0
  [56ddb016] Logging v1.11.0
  [d6f4376e] Markdown v1.11.0
  [ca575930] NetworkOptions v1.3.0
  [44cfe95a] Pkg v1.12.1
  [de0858da] Printf v1.11.0
  [3fa0cd96] REPL v1.11.0
  [9a3f8284] Random v1.11.0
  [ea8e919c] SHA v0.7.0
  [9e88b42a] Serialization v1.11.0
  [6462fe0b] Sockets v1.11.0
  [2f01184e] SparseArrays v1.12.0
  [f489334b] StyledStrings v1.11.0
  [fa267f1f] TOML v1.0.3
  [a4e569a6] Tar v1.10.0
  [8dfed614] Test v1.11.0
  [cf7118a7] UUIDs v1.11.0
  [4ec0a83e] Unicode v1.11.0
  [e66e0078] CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] LibCURL_jll v8.15.0+0
  [e37daf67] LibGit2_jll v1.9.0+0
  [29816b5a] LibSSH2_jll v1.11.3+1
  [14a3606d] MozillaCACerts_jll v2025.11.4
  [4536629a] OpenBLAS_jll v0.3.29+0
  [458c3c95] OpenSSL_jll v3.5.4+0
  [bea87d4a] SuiteSparse_jll v7.8.3+2
  [83775a58] Zlib_jll v1.3.1+2
  [8e850b90] libblastrampoline_jll v5.15.0+0
  [8e850ede] nghttp2_jll v1.64.0+1
  [3f19e933] p7zip_jll v17.7.0+0
        Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading.
Testing Running tests...
Precompiling packages...
   1493.5 ms  ✓ MPIPreferences
   5043.3 ms  ✓ MPItrampoline_jll
   4535.5 ms  ✓ OpenMPI_jll
   4446.4 ms  ✓ MPICH_jll
  13982.4 ms  ✓ MPI
  5 dependencies successfully precompiled in 30 seconds. 40 already precompiled.
Precompiling packages...
   5121.1 ms  ✓ Blake3Hash
   4981.0 ms  ✓ SCALAPACK32_jll
   5066.3 ms  ✓ PARMETIS_jll
   4952.8 ms  ✓ MUMPS_jll
   7408.2 ms  ✓ MUMPS
        Info Given LinearAlgebraMPI was explicitly requested, output will be shown live
ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
The function `convert` exists, but no method is defined for this combination of argument types.
[same MethodError stack trace as in the Precompilation section above]
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
in expression starting at stdin:5
  ✗ LinearAlgebraMPI
  5 dependencies successfully precompiled in 83 seconds. 65 already precompiled.
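The `✗` above comes from the `@compile_workload` block at LinearAlgebraMPI.jl:1308-1414, which exercises real `ldlt` calls during precompilation, so any solver error aborts cache generation for the whole package. A hedged sketch (not the package's actual code; the workload body is a placeholder, only the PrecompileTools macros are the real API) of how such a workload could degrade gracefully instead:

    using PrecompileTools

    @setup_workload begin
        # build small inputs here, e.g. a tiny sparse test matrix
        @compile_workload begin
            try
                # exercise the hot path, e.g. ldlt(A) on the small matrix
            catch err
                # precompilation still succeeds; only this workload is skipped
                @debug "skipping factorization workload during precompile" err
            end
        end
    end

With a guard like this, an environment-specific solver failure would cost some precompile coverage rather than making the package unloadable.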
┌ Warning: Precompile step hit an error; tests may still proceed
│   err =
│    The following 1 direct dependency failed to precompile:
│
│    LinearAlgebraMPI
│
│    Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.12/LinearAlgebraMPI/jl_pNArhR".
│    ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
│    The function `convert` exists, but no method is defined for this combination of argument types.
│    [same MethodError stack trace as in the Precompilation section above]
│    in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
│    in expression starting at stdin:
└ @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:12

Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fff442591f0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)
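This MPICH failure is unrelated to the MUMPS bug: `gethostbyname` cannot resolve the sandbox hostname (`LinearAlgebraMPI-primary-4ATBWmiQ`), so MPICH's TCP netmod cannot publish its business card during `MPI_Init_thread` and every rank dies before the test body runs. A small diagnostic sketch, using only the Sockets stdlib and assuming it runs inside the same sandbox, that performs the lookup MPICH needs:

    using Sockets

    host = gethostname()        # e.g. "LinearAlgebraMPI-primary-4ATBWmiQ"
    try
        ip = getaddrinfo(host)  # the resolution MPICH's gethostbyname performs
        println("$host resolves to $ip; MPI_Init should get past this point")
    catch err
        println("$host does not resolve ($err); expect the MPI_Init failure above")
    end

A common workaround in container environments is to map the hostname to loopback in /etc/hosts; without it, every mpiexec-launched test below fails the same way with exit code 15.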
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_matrix_multiplication.jl"
│   ok = false
│   expect_success = true
│   exitcode = 15
│   cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_matrix_multiplication.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└   active_proj = "/tmp/jl_Xd6wtu/Project.toml"
MPI Matrix Multiplication: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] top-level scope
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [4] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:38 [inlined]
 [6] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [7] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:38 [inlined]
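The "exit status mismatch" blocks all come from the harness function `run_mpi_test` at runtests.jl:33, whose keyword signature (`nprocs`, `nthreads`, `expect_success`) is visible in the trace: it launches each test file under `mpiexec -n 2` and asserts on the child's exit status. A hedged reconstruction of that pattern, not LinearAlgebraMPI's actual runtests.jl; only `MPI.mpiexec()` and the Test/Base calls are real APIs, the body is inferred from the log:

    using Test
    using MPI: mpiexec

    function run_mpi_test(test_file::String; nprocs::Int=2, nthreads::Int=2,
                          expect_success::Bool=true)
        julia = Base.julia_cmd()
        cmd = `$(mpiexec()) -n $nprocs $julia --threads=$nthreads $test_file`
        p = run(ignorestatus(cmd))          # run all ranks, don't throw on failure
        ok = success(p)
        ok == expect_success ||
            @info "MPI test exit status mismatch" test_file ok expect_success exitcode = p.exitcode cmd
        @test ok == expect_success          # the assertion failing at runtests.jl:33
    end

Because the crash happens inside `MPI_Init_thread`, this harness can only observe the nonzero exit status (15 here), which is why every test below reports the same `false == true` evaluation.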
"PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" MPI Transpose: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:41 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:41 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffee1d72800) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_addition.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_addition.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … 
"OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" MPI Addition: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:44 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:44 [inlined] PrecompilingPrecompiling packages... packages... LinearAlgebraMPI Being precompiled by another process (pid: 638, pidfile: /home/pkgeval/.julia/compiled/v1.12/LinearAlgebraMPI/t5NwP_eJM3J.ji.pidfile) Info Given LinearAlgebraMPI was explicitly requested, output will be shown live  ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB The function `convert` exists, but no method is defined for this combination of argument types.  
  ✗ LinearAlgebraMPI
  0 dependencies successfully precompiled in 60 seconds. 70 already precompiled.
ERROR: LoadError: The following 1 direct dependency failed to precompile:

LinearAlgebraMPI

Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.12/LinearAlgebraMPI/jl_7YSSG3".
ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
The function `convert` exists, but no method is defined for this combination of argument types.
[same MethodError stack trace as in the Precompilation section above]
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
in expression starting at stdin:
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl:17
        Info Given LinearAlgebraMPI was explicitly requested, output will be shown live
ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
The function `convert` exists, but no method is defined for this combination of argument types.
[same MethodError stack trace as in the Precompilation section above]
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
in expression starting at stdin:5
  ✗ LinearAlgebraMPI
  0 dependencies successfully precompiled in 183 seconds. 70 already precompiled.
ERROR: LoadError: The following 1 direct dependency failed to precompile:

LinearAlgebraMPI

Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.12/LinearAlgebraMPI/jl_fB207V".
ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
The function `convert` exists, but no method is defined for this combination of argument types.
[same MethodError stack trace as in the Precompilation section above]
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
in expression starting at stdin:
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl:17
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl"
│   ok = false
│   expect_success = true
│   exitcode = 1
│   cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl`, [same environment as above])
"HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" MPI Lazy Transpose: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:47 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:47 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffce0ae47e0) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_vector_multiplication.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_vector_multiplication.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" MPI Vector Multiplication: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 
[inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:50 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:50 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffcd799e190) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_dense_matrix.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_dense_matrix.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" MPI Dense Matrix: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:53 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] 
 [7] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:53 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
[same MPICH error stack as above: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)]
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_sparse_api.jl"
│   ok = false
│   expect_success = true
│   exitcode = 15
│   cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_sparse_api.jl`, [same environment as above])
└   active_proj = "/tmp/jl_Xd6wtu/Project.toml"
MPI Sparse API Extensions: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] top-level scope
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [4] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:56 [inlined]
 [6] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [7] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:56 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
[same MPICH error stack as above: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)]
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_blocks.jl"
│   ok = false
│   expect_success = true
│   exitcode = 15
│   cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_blocks.jl`, [same environment as above])
└   active_proj = "/tmp/jl_Xd6wtu/Project.toml"
MPI Block Matrix Operations: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] top-level scope
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [4] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:59 [inlined]
 [6] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [7] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:59 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
[same MPICH error stack as above: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)]
Fatal error in internal_Init_thread: Other MPI error, error stack:
[same MPICH error stack as above: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)]
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_utilities.jl"
│   ok = false
│   expect_success = true
│   exitcode = 15
│   cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_utilities.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└   active_proj = "/tmp/jl_Xd6wtu/Project.toml"
MPI Utilities: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:62 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:62 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffda3317ab0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_local_constructors.jl"
│   ok = false
│   expect_success = true
│   exitcode = 15
│   cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_local_constructors.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└   active_proj = "/tmp/jl_Xd6wtu/Project.toml"
MPI Local Constructors: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:65 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:65 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe3ffc65c0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffcd5179d40) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)
┌ Info: MPI test exit status mismatch
"/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_indexing.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_indexing.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" MPI Indexing: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:68 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:68 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fffb5c280c0) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1) MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:85 Expression: occursin("Pass:", output) Evaluated: occursin("Pass:", "") Stacktrace: [1] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [2] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ 
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:85 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:86
  Expression: occursin("Fail: 0", output)
   Evaluated: occursin("Fail: 0", "")
Stacktrace:
 [1] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [2] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:86 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:87
  Expression: occursin("Error: 0", output)
   Evaluated: occursin("Error: 0", "")
Stacktrace:
 [1] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [2] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:87 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fffb42b9970) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_new_operations.jl"
│   ok = false
│   expect_success = true
│   exitcode = 15
"LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" Mixed Sparse-Dense Operations: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:90 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:90 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe3382cf00) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_repartition.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_repartition.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Xd6wtu/Project.toml" MPI Repartition: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:93 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:93 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffda54b13a0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-4ATBWmiQ (errno 1)
┌ Info: MPI test exit status mismatch
│   test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_map_rows.jl"
│   ok = false
│   expect_success = true
│   exitcode = 15
│   cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Xd6wtu/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_map_rows.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Xd6wtu", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└   active_proj = "/tmp/jl_Xd6wtu/Project.toml"
MPI map_rows: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:96 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.12/Test/src/Test.jl:1776 [inlined]
 [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:96 [inlined]
Test Summary:                   | Fail  Total     Time
LinearAlgebraMPI Tests          |   17     17  6m23.2s
  MPI Matrix Multiplication     |    1      1    18.1s
  MPI Transpose                 |    1      1    14.6s
  MPI Addition                  |    1      1    12.8s
  MPI Lazy Transpose            |    1      1  3m20.3s
  MPI Vector Multiplication     |    1      1    11.6s
  MPI Dense Matrix              |    1      1    12.4s
  MPI Sparse API Extensions     |    1      1    12.6s
  MPI Block Matrix Operations   |    1      1    12.5s
  MPI Utilities                 |    1      1    11.7s
  MPI Local Constructors        |    1      1    11.9s
  MPI Indexing                  |    1      1    11.9s
  MPI Factorization             |    3      3    14.8s
  Mixed Sparse-Dense Operations |    1      1    13.5s
  MPI Repartition               |    1      1    11.9s
  MPI map_rows                  |    1      1    12.4s
RNG of the outermost testset: Random.Xoshiro(0x74d1072c45f34e1a, 0xafa00ac80bd7aae3, 0xa68541df17c5a4b2, 0xaa348115a2ac192a, 0x6bbaf877530b3663)
ERROR: LoadError: Some tests did not pass: 0 passed, 17 failed, 0 errored, 0 broken.
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:36
Testing failed after 371.83s
ERROR: LoadError: Package LinearAlgebraMPI errored during testing
Stacktrace:
 [1] pkgerror(msg::String) @ Pkg.Types /opt/julia/share/julia/stdlib/v1.12/Pkg/src/Types.jl:68
 [2] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, julia_args::Cmd, test_args::Cmd, test_fn::Nothing, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool) @ Pkg.Operations /opt/julia/share/julia/stdlib/v1.12/Pkg/src/Operations.jl:2535
 [3] test @ /opt/julia/share/julia/stdlib/v1.12/Pkg/src/Operations.jl:2384 [inlined]
 [4] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, test_fn::Nothing, julia_args::Cmd, test_args::Cmd, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool, kwargs::@Kwargs{io::IOContext{IO}}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.12/Pkg/src/API.jl:538
 [5] test(pkgs::Vector{PackageSpec}; io::IOContext{IO}, kwargs::@Kwargs{julia_args::Cmd}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.12/Pkg/src/API.jl:169
 [6] test(pkgs::Vector{String}; kwargs::@Kwargs{julia_args::Cmd}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.12/Pkg/src/API.jl:157
 [7] test @ /opt/julia/share/julia/stdlib/v1.12/Pkg/src/API.jl:157 [inlined]
 [8] #test#81 @ /opt/julia/share/julia/stdlib/v1.12/Pkg/src/API.jl:156 [inlined]
 [9] top-level scope @ /PkgEval.jl/scripts/evaluate.jl:223
 [10] include(mod::Module, _path::String) @ Base ./Base.jl:306
 [11] exec_options(opts::Base.JLOptions) @ Base ./client.jl:317
 [12] _start() @ Base ./client.jl:550
in expression starting at /PkgEval.jl/scripts/evaluate.jl:214
PkgEval failed after 1199.14s: package fails to precompile
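
All 17 test failures above share one root cause: inside the PkgEval sandbox, MPICH's gethostbyname lookup cannot resolve the container hostname `LinearAlgebraMPI-primary-4ATBWmiQ`, so `MPI_Init_thread` aborts (exit code 15) before any test code runs. A minimal diagnostic sketch, not part of the original test suite, that reproduces the same lookup from Julia:

```julia
# Diagnostic sketch: perform the hostname lookup that MPICH's
# GetSockInterfaceAddr does. If getaddrinfo throws a DNSError here,
# MPI_Init_thread will fail exactly as in the log above.
using Sockets

host = gethostname()
try
    ip = getaddrinfo(host)
    println("$host resolves to $ip; MPI startup should get past GetSockInterfaceAddr")
catch err
    println("$host does not resolve ($(typeof(err))); matches \"gethostbyname failed\" above")
end
```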
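To re-run these tests in a similar sandbox, the usual fix is to make the hostname resolvable before `mpiexec` spawns the ranks. Both options below are assumptions about the environment, not something the log shows was attempted; `MPICH_INTERFACE_HOSTNAME` is the MPICH environment variable commonly cited for overriding the hostname it advertises.

```julia
# Hypothetical workaround sketch (Option 1 needs write access to /etc/hosts
# inside the sandbox; neither option is confirmed by the log above).
host = gethostname()

# Option 1: map the unresolvable hostname to loopback.
open("/etc/hosts", "a") do io
    println(io, "127.0.0.1 $host")
end

# Option 2: tell MPICH to advertise a resolvable name instead; this must be
# set in the environment that mpiexec and the ranks inherit.
ENV["MPICH_INTERFACE_HOSTNAME"] = "localhost"
```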
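For context on the repeated `ok == expect_success` failures at runtests.jl:33: the stack traces show each test file being launched through a `run_mpi_test` helper under `mpiexec -n 2`, with the assertion failing because every worker dies in `MPI_Init_thread`. A rough sketch of what that helper evidently does, reconstructed from the traces (argument names come from the trace; the body is a guess, and the package's actual implementation may differ):

```julia
using Test
using MPI: mpiexec

# Reconstructed harness sketch: run a test file under the MPI launcher and
# assert on the workers' exit status.
function run_mpi_test(test_file::String; nprocs::Int=2, nthreads::Int=2, expect_success::Bool=true)
    ok = mpiexec() do exe
        cmd = `$exe -n $nprocs $(Base.julia_cmd()) --threads=$nthreads $test_file`
        success(cmd)   # false here when MPI_Init_thread aborts with exit code 15
    end
    @test ok == expect_success
    return ok
end
```

Under this reading, the "MPI test exit status mismatch" info blocks are the harness logging the failed command and environment before the `@test` fires, which is why every testset reports exactly one failure regardless of what the test file itself contains.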