Package evaluation to test LinearAlgebraMPI on Julia 1.14.0-DEV.1563 (14ca1abc72*) started at 2026-01-15T17:37:09.929

################################################################################
# Set-up
#

Installing PkgEval dependencies (TestEnv)...
Activating project at `~/.julia/environments/v1.14`

Set-up completed after 9.57s

################################################################################
# Installation
#

Installing LinearAlgebraMPI...
Resolving package versions...
Updating `~/.julia/environments/v1.14/Project.toml`
  [5bdd2be4] + LinearAlgebraMPI v0.1.9
Updating `~/.julia/environments/v1.14/Manifest.toml`
  [79e6a3ab] + Adapt v4.4.0
  [a9b6321e] + Atomix v1.1.2
  [8f478455] + Blake3Hash v0.3.0
  [ffbed154] + DocStringExtensions v0.9.5
  [692b3bcd] + JLLWrappers v1.7.1
  [63c18a36] + KernelAbstractions v0.9.39
  [5bdd2be4] + LinearAlgebraMPI v0.1.9
  [da04e1cc] + MPI v0.20.23
  [3da0fdf6] + MPIPreferences v0.1.11
  [55d2b088] + MUMPS v1.5.5
  [1914dd2f] + MacroTools v0.5.16
  [eebad327] + PkgVersion v0.3.3
  [aea7be01] + PrecompileTools v1.3.3
  [21216c6a] + Preferences v1.5.1
  [ae029012] + Requires v1.3.1
  [fdea26ae] + SIMD v3.7.2
  [90137ffa] + StaticArrays v1.9.16
  [1e83bf80] + StaticArraysCore v1.4.4
  [013be700] + UnsafeAtomics v0.3.0
  [6e34b625] + Bzip2_jll v1.0.9+0
  [e33a78d0] + Hwloc_jll v2.12.2+0
  [94ce4f54] + Libiconv_jll v1.18.0+0
  [d00139f3] + METIS_jll v5.1.3+0
  [7cb0a576] + MPICH_jll v4.3.2+0
  [f1f71cc9] + MPItrampoline_jll v5.5.4+0
  [ca64183c] + MUMPS_jll v5.8.1+0
  [9237b28f] + MicrosoftMPI_jll v10.1.4+3
  [656ef2d0] + OpenBLAS32_jll v0.3.29+0
  [fe0851c0] + OpenMPI_jll v5.0.9+0
  [b247a4be] + PARMETIS_jll v4.0.6+2
⌅ [aabda75e] + SCALAPACK32_jll v2.2.1+1
  [a8d0f55d] + SCOTCH_jll v7.0.7+0
⌅ [02c8fc9c] + XML2_jll v2.13.9+0
  [ffd25f8a] + XZ_jll v5.8.2+0
  [a65dc6b1] + Xorg_libpciaccess_jll v0.18.1+0
  [0dad84c5] + ArgTools v1.1.2
  [56f22d72] + Artifacts v1.11.0
  [2a0f44e3] + Base64 v1.11.0
  [ade2ca70] + Dates v1.11.0
  [8ba89e20] + Distributed v1.11.0
  [f43a241f] + Downloads v1.7.0
  [7b1f6079] + FileWatching v1.11.0
  [b77e0a4c] + InteractiveUtils v1.11.0
  [ac6e5ff7] + JuliaSyntaxHighlighting v1.13.0
  [4af54fe1] + LazyArtifacts v1.11.0
  [b27032c2] + LibCURL v1.0.0
  [76f85450] + LibGit2 v1.11.0
  [8f399da3] + Libdl v1.11.0
  [37e2e46d] + LinearAlgebra v1.13.0
  [56ddb016] + Logging v1.11.0
  [d6f4376e] + Markdown v1.11.0
  [ca575930] + NetworkOptions v1.3.0
  [44cfe95a] + Pkg v1.14.0
  [de0858da] + Printf v1.11.0
  [9a3f8284] + Random v1.11.0
  [ea8e919c] + SHA v1.0.0
  [9e88b42a] + Serialization v1.11.0
  [6462fe0b] + Sockets v1.11.0
  [2f01184e] + SparseArrays v1.13.0
  [f489334b] + StyledStrings v1.13.0
  [fa267f1f] + TOML v1.0.3
  [a4e569a6] + Tar v1.10.0
  [cf7118a7] + UUIDs v1.11.0
  [4ec0a83e] + Unicode v1.11.0
  [e66e0078] + CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] + LibCURL_jll v8.18.0+0
  [e37daf67] + LibGit2_jll v1.9.2+0
  [29816b5a] + LibSSH2_jll v1.11.3+1
  [14a3606d] + MozillaCACerts_jll v2025.12.2
  [4536629a] + OpenBLAS_jll v0.3.29+0
  [458c3c95] + OpenSSL_jll v3.5.4+0
  [efcefdf7] + PCRE2_jll v10.47.0+0
  [bea87d4a] + SuiteSparse_jll v7.10.1+0
  [83775a58] + Zlib_jll v1.3.1+2
  [3161d3a3] + Zstd_jll v1.5.7+1
  [8e850b90] + libblastrampoline_jll v5.15.0+0
  [8e850ede] + nghttp2_jll v1.68.0+1
  [3f19e933] + p7zip_jll v17.7.0+0
Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading. To see why use `status --outdated -m`

Installation completed after 11.65s

################################################################################
# Precompilation
#

Precompiling PkgEval dependencies...
Precompiling package dependencies...
Precompiling packages...
   2719.6 ms  ✓ Blake3Hash
   6169.6 ms  ✓ NCCL_jll
WARNING: Imported binding Compiler.WorldView was undeclared at import time during import to GPUCompiler.
ERROR: LoadError: UndefVarError: `WorldView` not defined in `GPUCompiler`
Suggestion: this global was defined as `Compiler.WorldView` but not assigned a value.
Stacktrace:
  [1] ci_cache_lookup(cache::Compiler.OverlayCodeCache{Compiler.InternalCodeCache}, mi::Core.MethodInstance, min_world::UInt64, max_world::UInt64)
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/jlgen.jl:648
  [2] compile_method_instance(job::GPUCompiler.CompilerJob)
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/jlgen.jl:839
  [3] irgen(job::GPUCompiler.CompilerJob)
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/irgen.jl:4
  [4] emit_llvm(job::GPUCompiler.CompilerJob; kwargs::@Kwargs{})
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:200
  [5] emit_llvm(job::GPUCompiler.CompilerJob)
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:182
  [6] compile_unhooked(output::Symbol, job::GPUCompiler.CompilerJob; kwargs::@Kwargs{})
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:95
  [7] compile_unhooked
    @ ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:80 [inlined]
  [8] compile(target::Symbol, job::GPUCompiler.CompilerJob; kwargs::@Kwargs{})
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:67
  [9] compile
    @ ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:55 [inlined]
 [10] #145
    @ ~/.julia/packages/GPUCompiler/j4HFa/src/precompile.jl:35 [inlined]
 [11] JuliaContext(f::GPUCompiler.var"#145#146"{GPUCompiler.CompilerJob{GPUCompiler.NativeCompilerTarget, GPUCompiler.var"##155".DummyCompilerParams}}; kwargs::@Kwargs{})
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:34
 [12] JuliaContext(f::Function)
    @ GPUCompiler ~/.julia/packages/GPUCompiler/j4HFa/src/driver.jl:25
 [13] macro expansion
    @ ~/.julia/packages/GPUCompiler/j4HFa/src/precompile.jl:34 [inlined]
 [14] macro expansion
    @ ~/.julia/packages/PrecompileTools/gn08A/src/workloads.jl:73 [inlined]
 [15] macro expansion
    @ ~/.julia/packages/GPUCompiler/j4HFa/src/precompile.jl:25 [inlined]
 [16] macro expansion
    @ ~/.julia/packages/PrecompileTools/gn08A/src/workloads.jl:121 [inlined]
 [17] top-level scope
    @ ~/.julia/packages/GPUCompiler/j4HFa/src/precompile.jl:118
 [18] include(mapexpr::Function, mod::Module, _path::String)
    @ Base ./Base.jl:310
 [19] top-level scope
    @ ~/.julia/packages/GPUCompiler/j4HFa/src/GPUCompiler.jl:70
 [20] include(mod::Module, _path::String)
    @ Base ./Base.jl:309
 [21] include_package_for_output(pkg::Base.PkgId, input::String, syntax_version::VersionNumber, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)
    @ Base ./loading.jl:3309
 [22] top-level scope
    @ stdin:5
 [23] eval(m::Module, e::Any)
    @ Core ./boot.jl:489
 [24] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)
    @ Base ./loading.jl:3151
 [25] include_string
    @ ./loading.jl:3161 [inlined]
 [26] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:342
 [27] _start()
    @ Base ./client.jl:585
in expression starting at /home/pkgeval/.julia/packages/GPUCompiler/j4HFa/src/precompile.jl:3
in expression starting at /home/pkgeval/.julia/packages/GPUCompiler/j4HFa/src/GPUCompiler.jl:1
in expression starting at stdin:5
  ✗ GPUCompiler
   5112.8 ms  ✓ SCALAPACK32_jll
ERROR: LoadError: Precompiled image Base.PkgId(Base.UUID("61eb1bfa-7361-4325-ad38-22787b887f55"), "GPUCompiler") not available with flags CacheFlags(; use_pkgimages=false, debug_level=1, check_bounds=1, inline=true, opt_level=0) Stacktrace:  [1] error(s::String)  @ Base ./error.jl:44  [2] __require_prelocked(pkg::Base.PkgId, env::String)  @ Base ./loading.jl:2873  [3] _require_prelocked(uuidkey::Base.PkgId, env::String)  @ Base ./loading.jl:2725  [4] macro expansion  @ ./loading.jl:2653 [inlined]  [5] macro expansion  @ ./lock.jl:376 [inlined]  [6] __require(into::Module, mod::Symbol)  @ Base ./loading.jl:2617  [7] require  @ ./loading.jl:2593 [inlined]  [8] eval_import_path  @ ./module.jl:36 [inlined]  [9] eval_import_path_all(at::Module, path::Expr, keyword::String)  @ Base ./module.jl:60  [10] _eval_using  @ ./module.jl:137 [inlined]  [11] _eval_using(to::Module, path::Expr)  @ Base ./module.jl:137  [12] top-level scope  @ ~/.julia/packages/CUDA/FJf6p/src/CUDA.jl:3  [13] include(mod::Module, _path::String)  @ Base ./Base.jl:309  [14] include_package_for_output(pkg::Base.PkgId, input::String, syntax_version::VersionNumber, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:3309  [15] top-level scope  @ stdin:5  [16] eval(m::Module, e::Any)  @ Core ./boot.jl:489  [17] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)  @ Base ./loading.jl:3151  [18] include_string  @ ./loading.jl:3161 [inlined]  [19] exec_options(opts::Base.JLOptions)  @ Base ./client.jl:342  [20] _start()  @ Base ./client.jl:585 in expression starting at /home/pkgeval/.julia/packages/CUDA/FJf6p/src/CUDA.jl:1 in expression starting at stdin:5 ✗ CUDA 4996.6 ms ✓ MUMPS_jll ERROR: LoadError: Precompiled image Base.PkgId(Base.UUID("052768ef-5323-5732-b1bb-66c8b64840ba"), "CUDA") not available with flags CacheFlags(; use_pkgimages=false, debug_level=1, check_bounds=1, inline=true, opt_level=0) Stacktrace:  [1] error(s::String)  @ Base ./error.jl:44  [2] __require_prelocked(pkg::Base.PkgId, env::String)  @ Base ./loading.jl:2873  [3] _require_prelocked(uuidkey::Base.PkgId, env::String)  @ Base ./loading.jl:2725  [4] macro expansion  @ ./loading.jl:2653 [inlined]  [5] macro expansion  @ ./lock.jl:376 [inlined]  [6] __require(into::Module, mod::Symbol)  @ Base ./loading.jl:2617  [7] require  @ ./loading.jl:2593 [inlined]  [8] eval_import_path  @ ./module.jl:36 [inlined]  [9] eval_import_path_all(at::Module, path::Expr, keyword::String)  @ Base ./module.jl:60  [10] _eval_import(::Bool, ::Module, ::Expr, ::Expr, ::Vararg{Expr})  @ Base ./module.jl:101  [11] top-level scope  @ ~/.julia/packages/Atomix/0UMek/ext/AtomixCUDAExt.jl:5  [12] include(mod::Module, _path::String)  @ Base ./Base.jl:309  [13] include_package_for_output(pkg::Base.PkgId, input::String, syntax_version::VersionNumber, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:3309  [14] top-level scope  @ stdin:5  [15] eval(m::Module, e::Any)  @ Core ./boot.jl:489  [16] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)  @ Base ./loading.jl:3151  [17] include_string  @ ./loading.jl:3161 [inlined]  [18] exec_options(opts::Base.JLOptions)  @ Base ./client.jl:342  [19] _start()  @ Base ./client.jl:585 in expression starting at 
/home/pkgeval/.julia/packages/Atomix/0UMek/ext/AtomixCUDAExt.jl:2 in expression starting at stdin:5 ✗ Atomix → AtomixCUDAExt ERROR: LoadError: Precompiled image Base.PkgId(Base.UUID("052768ef-5323-5732-b1bb-66c8b64840ba"), "CUDA") not available with flags CacheFlags(; use_pkgimages=false, debug_level=1, check_bounds=1, inline=true, opt_level=0) Stacktrace:  [1] error(s::String)  @ Base ./error.jl:44  [2] __require_prelocked(pkg::Base.PkgId, env::String)  @ Base ./loading.jl:2873  [3] _require_prelocked(uuidkey::Base.PkgId, env::String)  @ Base ./loading.jl:2725  [4] macro expansion  @ ./loading.jl:2653 [inlined]  [5] macro expansion  @ ./lock.jl:376 [inlined]  [6] __require(into::Module, mod::Symbol)  @ Base ./loading.jl:2617  [7] require  @ ./loading.jl:2593 [inlined]  [8] eval_import_path  @ ./module.jl:36 [inlined]  [9] _eval_import(imported::Bool, to::Module, from::Nothing, paths::Expr)  @ Base ./module.jl:111  [10] top-level scope  @ ~/.julia/packages/MPI/hNJm0/ext/CUDAExt.jl:4  [11] include(mod::Module, _path::String)  @ Base ./Base.jl:309  [12] include_package_for_output(pkg::Base.PkgId, input::String, syntax_version::VersionNumber, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:3309  [13] top-level scope  @ stdin:5  [14] eval(m::Module, e::Any)  @ Core ./boot.jl:489  [15] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)  @ Base ./loading.jl:3151  [16] include_string  @ ./loading.jl:3161 [inlined]  [17] exec_options(opts::Base.JLOptions)  @ Base ./client.jl:342  [18] _start()  @ Base ./client.jl:585 in expression starting at /home/pkgeval/.julia/packages/MPI/hNJm0/ext/CUDAExt.jl:1 in expression starting at stdin:5 ✗ MPI → CUDAExt 6658.6 ms ✓ MUMPS ERROR: LoadError: Precompiled image Base.PkgId(Base.UUID("052768ef-5323-5732-b1bb-66c8b64840ba"), "CUDA") not available with flags CacheFlags(; use_pkgimages=false, debug_level=1, check_bounds=1, inline=true, opt_level=0) Stacktrace:  [1] error(s::String)  @ Base ./error.jl:44  [2] __require_prelocked(pkg::Base.PkgId, env::String)  @ Base ./loading.jl:2873  [3] _require_prelocked(uuidkey::Base.PkgId, env::String)  @ Base ./loading.jl:2725  [4] macro expansion  @ ./loading.jl:2653 [inlined]  [5] macro expansion  @ ./lock.jl:376 [inlined]  [6] __require(into::Module, mod::Symbol)  @ Base ./loading.jl:2617  [7] require  @ ./loading.jl:2593 [inlined]  [8] eval_import_path  @ ./module.jl:36 [inlined]  [9] eval_import_path_all(at::Module, path::Expr, keyword::String)  @ Base ./module.jl:60  [10] _eval_using  @ ./module.jl:137 [inlined]  [11] _eval_using(to::Module, path::Expr)  @ Base ./module.jl:137  [12] top-level scope  @ ~/.julia/packages/NCCL/wRgZg/src/NCCL.jl:3  [13] include(mod::Module, _path::String)  @ Base ./Base.jl:309  [14] include_package_for_output(pkg::Base.PkgId, input::String, syntax_version::VersionNumber, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:3309  [15] top-level scope  @ stdin:5  [16] eval(m::Module, e::Any)  @ Core ./boot.jl:489  [17] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)  @ Base ./loading.jl:3151  [18] include_string  @ ./loading.jl:3161 [inlined]  [19] exec_options(opts::Base.JLOptions)  @ Base ./client.jl:342  [20] _start()  @ Base ./client.jl:585 in expression starting at 
/home/pkgeval/.julia/packages/NCCL/wRgZg/src/NCCL.jl:1 in expression starting at stdin:5 ✗ NCCL 41402.0 ms ✓ LinearAlgebraMPI ERROR: LoadError: Precompiled image Base.PkgId(Base.UUID("052768ef-5323-5732-b1bb-66c8b64840ba"), "CUDA") not available with flags CacheFlags(; use_pkgimages=false, debug_level=1, check_bounds=1, inline=true, opt_level=0) Stacktrace:  [1] error(s::String)  @ Base ./error.jl:44  [2] __require_prelocked(pkg::Base.PkgId, env::String)  @ Base ./loading.jl:2873  [3] _require_prelocked(uuidkey::Base.PkgId, env::String)  @ Base ./loading.jl:2725  [4] macro expansion  @ ./loading.jl:2653 [inlined]  [5] macro expansion  @ ./lock.jl:376 [inlined]  [6] __require(into::Module, mod::Symbol)  @ Base ./loading.jl:2617  [7] require  @ ./loading.jl:2593 [inlined]  [8] eval_import_path  @ ./module.jl:36 [inlined]  [9] eval_import_path_all(at::Module, path::Expr, keyword::String)  @ Base ./module.jl:60  [10] _eval_using  @ ./module.jl:137 [inlined]  [11] _eval_using(to::Module, path::Expr)  @ Base ./module.jl:137  [12] top-level scope  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/ext/LinearAlgebraMPICUDAExt.jl:14  [13] include(mod::Module, _path::String)  @ Base ./Base.jl:309  [14] include_package_for_output(pkg::Base.PkgId, input::String, syntax_version::VersionNumber, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:3309  [15] top-level scope  @ stdin:5  [16] eval(m::Module, e::Any)  @ Core ./boot.jl:489  [17] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)  @ Base ./loading.jl:3151  [18] include_string  @ ./loading.jl:3161 [inlined]  [19] exec_options(opts::Base.JLOptions)  @ Base ./client.jl:342  [20] _start()  @ Base ./client.jl:585 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/ext/LinearAlgebraMPICUDAExt.jl:1 in expression starting at stdin:5 ✗ LinearAlgebraMPI → LinearAlgebraMPICUDAExt 6 dependencies successfully precompiled in 137 seconds. 124 already precompiled. 
Precompilation completed after 183.08s

################################################################################
# Testing
#

Testing LinearAlgebraMPI
Status `/tmp/jl_W4uier/Project.toml`
  [79e6a3ab] Adapt v4.4.0
  [8f478455] Blake3Hash v0.3.0
  [052768ef] CUDA v5.9.6
  [63c18a36] KernelAbstractions v0.9.39
  [5bdd2be4] LinearAlgebraMPI v0.1.9
  [da04e1cc] MPI v0.20.23
  [3da0fdf6] MPIPreferences v0.1.11
  [55d2b088] MUMPS v1.5.5
  [3fe64909] NCCL v0.1.2
  [aea7be01] PrecompileTools v1.3.3
  [90137ffa] StaticArrays v1.9.16
  [4889d778] CUDSS_jll v0.7.1+0
  [37e2e46d] LinearAlgebra v1.13.0
  [9a3f8284] Random v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [8dfed614] Test v1.11.0
Status `/tmp/jl_W4uier/Manifest.toml`
  [621f4979] AbstractFFTs v1.5.0
  [79e6a3ab] Adapt v4.4.0
  [a9b6321e] Atomix v1.1.2
  [ab4f0b2a] BFloat16s v0.6.0
  [8f478455] Blake3Hash v0.3.0
  [fa961155] CEnum v0.5.0
  [052768ef] CUDA v5.9.6
  [1af6417a] CUDA_Runtime_Discovery v1.0.0
  [34da2185] Compat v4.18.1
  [a8cc5b0e] Crayons v4.1.1
  [9a962f9c] DataAPI v1.16.0
  [a93c6f00] DataFrames v1.8.1
  [864edb3b] DataStructures v0.19.3
  [e2d170a0] DataValueInterfaces v1.0.0
  [ffbed154] DocStringExtensions v0.9.5
  [e2ba6199] ExprTools v0.1.10
  [0c68f7d7] GPUArrays v11.3.4
  [46192b85] GPUArraysCore v0.2.0
  [61eb1bfa] GPUCompiler v1.7.5
  [096a3bc2] GPUToolbox v1.0.0
  [076d061b] HashArrayMappedTries v0.2.0
  [842dd82b] InlineStrings v1.4.5
  [41ab1584] InvertedIndices v1.3.1
  [82899510] IteratorInterfaceExtensions v1.0.0
  [692b3bcd] JLLWrappers v1.7.1
  [63c18a36] KernelAbstractions v0.9.39
  [929cbde3] LLVM v9.4.4
  [8b046642] LLVMLoopInfo v1.0.0
  [b964fa9f] LaTeXStrings v1.4.0
  [5bdd2be4] LinearAlgebraMPI v0.1.9
  [da04e1cc] MPI v0.20.23
  [3da0fdf6] MPIPreferences v0.1.11
  [55d2b088] MUMPS v1.5.5
  [1914dd2f] MacroTools v0.5.16
  [e1d29d7a] Missings v1.2.0
  [3fe64909] NCCL v0.1.2
  [5da4648a] NVTX v1.0.3
  [bac558e1] OrderedCollections v1.8.1
  [eebad327] PkgVersion v0.3.3
  [2dfb63ee] PooledArrays v1.4.3
  [aea7be01] PrecompileTools v1.3.3
  [21216c6a] Preferences v1.5.1
  [08abe8d2] PrettyTables v3.1.2
  [74087812] Random123 v1.7.1
  [e6cf234a] RandomNumbers v1.6.0
  [189a3867] Reexport v1.2.2
  [ae029012] Requires v1.3.1
  [fdea26ae] SIMD v3.7.2
  [7e506255] ScopedValues v1.5.0
  [6c6a2e73] Scratch v1.3.0
  [91c51154] SentinelArrays v1.4.9
  [a2af1166] SortingAlgorithms v1.2.2
  [90137ffa] StaticArrays v1.9.16
  [1e83bf80] StaticArraysCore v1.4.4
  [10745b16] Statistics v1.11.1
  [892a3eda] StringManipulation v0.4.2
  [3783bdb8] TableTraits v1.0.1
  [bd369af6] Tables v1.12.1
  [e689c965] Tracy v0.1.6
  [013be700] UnsafeAtomics v0.3.0
  [6e34b625] Bzip2_jll v1.0.9+0
  [d1e2174e] CUDA_Compiler_jll v0.4.1+1
  [4ee394cb] CUDA_Driver_jll v13.1.0+2
⌅ [76a88914] CUDA_Runtime_jll v0.19.2+0
  [4889d778] CUDSS_jll v0.7.1+0
  [e33a78d0] Hwloc_jll v2.12.2+0
  [9c1d0b0a] JuliaNVTXCallbacks_jll v0.2.1+0
  [dad2f222] LLVMExtra_jll v0.0.38+0
  [ad6e5548] LibTracyClient_jll v0.13.1+0
  [94ce4f54] Libiconv_jll v1.18.0+0
  [d00139f3] METIS_jll v5.1.3+0
  [7cb0a576] MPICH_jll v4.3.2+0
  [f1f71cc9] MPItrampoline_jll v5.5.4+0
  [ca64183c] MUMPS_jll v5.8.1+0
  [9237b28f] MicrosoftMPI_jll v10.1.4+3
  [4d6d38e4] NCCL_jll v2.28.3+0
  [e98f9f5b] NVTX_jll v3.2.2+0
  [656ef2d0] OpenBLAS32_jll v0.3.29+0
  [fe0851c0] OpenMPI_jll v5.0.9+0
  [b247a4be] PARMETIS_jll v4.0.6+2
⌅ [aabda75e] SCALAPACK32_jll v2.2.1+1
  [a8d0f55d] SCOTCH_jll v7.0.7+0
⌅ [02c8fc9c] XML2_jll v2.13.9+0
  [ffd25f8a] XZ_jll v5.8.2+0
  [a65dc6b1] Xorg_libpciaccess_jll v0.18.1+0
  [1e29f10c] demumble_jll v1.3.0+0
  [0dad84c5] ArgTools v1.1.2
  [56f22d72] Artifacts v1.11.0
  [2a0f44e3] Base64 v1.11.0
  [ade2ca70] Dates v1.11.0
  [8ba89e20] Distributed v1.11.0
  [f43a241f] Downloads v1.7.0
  [7b1f6079] FileWatching v1.11.0
  [9fa8497b] Future v1.11.0
  [b77e0a4c] InteractiveUtils v1.11.0
  [ac6e5ff7] JuliaSyntaxHighlighting v1.13.0
  [4af54fe1] LazyArtifacts v1.11.0
  [b27032c2] LibCURL v1.0.0
  [76f85450] LibGit2 v1.11.0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.13.0
  [56ddb016] Logging v1.11.0
  [d6f4376e] Markdown v1.11.0
  [ca575930] NetworkOptions v1.3.0
  [44cfe95a] Pkg v1.14.0
  [de0858da] Printf v1.11.0
  [3fa0cd96] REPL v1.11.0
  [9a3f8284] Random v1.11.0
  [ea8e919c] SHA v1.0.0
  [9e88b42a] Serialization v1.11.0
  [6462fe0b] Sockets v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [f489334b] StyledStrings v1.13.0
  [fa267f1f] TOML v1.0.3
  [a4e569a6] Tar v1.10.0
  [8dfed614] Test v1.11.0
  [cf7118a7] UUIDs v1.11.0
  [4ec0a83e] Unicode v1.11.0
  [e66e0078] CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] LibCURL_jll v8.18.0+0
  [e37daf67] LibGit2_jll v1.9.2+0
  [29816b5a] LibSSH2_jll v1.11.3+1
  [14a3606d] MozillaCACerts_jll v2025.12.2
  [4536629a] OpenBLAS_jll v0.3.29+0
  [458c3c95] OpenSSL_jll v3.5.4+0
  [efcefdf7] PCRE2_jll v10.47.0+0
  [bea87d4a] SuiteSparse_jll v7.10.1+0
  [83775a58] Zlib_jll v1.3.1+2
  [3161d3a3] Zstd_jll v1.5.7+1
  [8e850b90] libblastrampoline_jll v5.15.0+0
  [8e850ede] nghttp2_jll v1.68.0+1
  [3f19e933] p7zip_jll v17.7.0+0
Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading.
Testing Running tests...
┌ Warning: The call to compilecache failed to create a usable precompiled cache file for LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377]
│ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file.
└ @ Base loading.jl:2950
┌ Warning: The call to compilecache failed to create a usable precompiled cache file for MUMPS [55d2b088-9f4e-11e9-26c0-150b02ea6a46]
│ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file.
└ @ Base loading.jl:2950
┌ Warning: The call to compilecache failed to create a usable precompiled cache file for MUMPS_jll [ca64183c-ec4f-5579-95d5-17e128c21291]
│ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file.
└ @ Base loading.jl:2950
┌ Warning: The call to compilecache failed to create a usable precompiled cache file for SCALAPACK32_jll [aabda75e-bfe4-5a37-92e3-ffe54af3c273]
│ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file.
└ @ Base loading.jl:2950 Precompilation complete for test environment Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe2530f300) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_matrix_multiplication.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_matrix_multiplication.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Matrix Multiplication: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:38 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:38 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffc2b25a480) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: 
init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_transpose.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_transpose.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Transpose: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:41 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:41 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffd0403a670) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = 
"/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_addition.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_addition.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Addition: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:44 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:44 [inlined] ┌ Warning: The call to compilecache failed to create a usable precompiled cache file for LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] │ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file. └ @ Base loading.jl:2950 MUMPS Being precompiled by another process (pid: 418, pidfile: /home/pkgeval/.julia/compiled/v1.14/MUMPS/HIYWh_FHeQ4.ji.pidfile) ┌ Warning: The call to compilecache failed to create a usable precompiled cache file for MUMPS [55d2b088-9f4e-11e9-26c0-150b02ea6a46] │ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file. 
└ @ Base loading.jl:2950 Task failed TypeError(:typeassert, "", Tuple{String, Union{Nothing, String}}, false) ERROR: Stacktrace: [1] (::Base.Precompilation.var"#_precompilepkgs##27#_precompilepkgs##28"{Bool, Bool, IOContext{IO}, Bool, Base.RefValue{Union{Nothing, Base.PkgId}}, Dict{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}, String}, Set{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}}, Dict{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}, IOBuffer}, Base.Precompilation.var"#handle_interrupt#_precompilepkgs##18"{IOContext{IO}, Base.RefValue{Task}, Base.RefValue{Bool}, String, Base.RefValue{Bool}, Base.Event, ReentrantLock, Dict{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}, Base.Event}}, Base.RefValue{Int64}, Base.RefValue{Int64}, Base.RefValue{Int64}, String, Base.RefValue{Bool}, Base.Event, ReentrantLock, Vector{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}}, Dict{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}, String}, Vector{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}}, Base.RefValue{Union{Nothing, String}}, Vector{Base.PkgId}, Dict{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}, Bool}, Dict{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}, Base.Event}, Dict{Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}, Bool}, Vector{Base.PkgId}, Vector{Base.PkgId}, Dict{Base.PkgId, Vector{Base.PkgId}}, Base.Precompilation.var"#describe_pkg#_precompilepkgs##1"{Dict{Base.PkgId, Base.PkgId}, Base.Precompilation.var"#color_string#_precompilepkgs##0"{Bool}, Int64}, Dict{Base.PkgId, Base.PkgId}, Dict{Base.PkgId, Vector{String}}, Dict{Tuple{Base.PkgId, UInt128, Base.PkgLoadSpec, String, Bool, Base.CacheFlags}, Bool}, Base.Precompilation.var"#color_string#_precompilepkgs##0"{Bool}, Bool, Base.CoreLogging.LogLevel, IOContext{IO}, Bool, Base.Semaphore, Vector{Base.PkgId}, Bool, Base.PkgLoadSpec, Vector{String}, Vector{String}, Vector{Base.PkgId}, Base.PkgId, Cmd, Base.CacheFlags, Pair{Cmd, Base.CacheFlags}, Tuple{Base.PkgId, Pair{Cmd, Base.CacheFlags}}})() @ Base.Precompilation ./precompilation.jl:1145 ┌ Warning: The call to compilecache failed to create a usable precompiled cache file for MUMPS_jll [ca64183c-ec4f-5579-95d5-17e128c21291] │ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file. └ @ Base loading.jl:2950 ┌ Warning: The call to compilecache failed to create a usable precompiled cache file for SCALAPACK32_jll [aabda75e-bfe4-5a37-92e3-ffe54af3c273] │ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file. └ @ Base loading.jl:2950 ┌ Warning: The call to compilecache failed to create a usable precompiled cache file for MUMPS [55d2b088-9f4e-11e9-26c0-150b02ea6a46] │ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file. └ @ Base loading.jl:2950 ┌ Warning: The call to compilecache failed to create a usable precompiled cache file for MUMPS_jll [ca64183c-ec4f-5579-95d5-17e128c21291] │ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file. └ @ Base loading.jl:2950 ┌ Warning: The call to compilecache failed to create a usable precompiled cache file for SCALAPACK32_jll [aabda75e-bfe4-5a37-92e3-ffe54af3c273] │ exception = Required dependency Base.PkgId(Base.UUID("656ef2d0-ae68-5445-9ca0-591084a874a2"), "OpenBLAS32_jll") failed to load from a cache file. 
└ @ Base loading.jl:2950 Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fff28a9f100) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Lazy Transpose: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:47 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:47 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffd504a7c50) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed 
MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_vector_multiplication.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_vector_multiplication.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Vector Multiplication: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:50 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:50 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffffa61b360) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = 
"/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_dense_matrix.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_dense_matrix.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Dense Matrix: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:53 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:53 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffea140ab60) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_sparse_api.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 
--project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_sparse_api.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Sparse API Extensions: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:56 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:56 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fffdfc721a0) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_blocks.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_blocks.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", 
"LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Block Matrix Operations: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:59 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:59 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffef4029370) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe73fae470) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_utilities.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml 
/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_utilities.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Utilities: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:62 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:62 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffd0594fe70) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_local_constructors.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_local_constructors.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", 
"LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Local Constructors: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:65 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:65 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffc803ebdc0) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_indexing.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_indexing.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", 
"PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"]) └ active_proj = "/tmp/jl_W4uier/Project.toml" MPI Indexing: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16 [4] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [6] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:68 [inlined] [7] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [8] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:68 [inlined] Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe69c2a840) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1) MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:85 Expression: occursin("Pass:", output) Evaluated: occursin("Pass:", "") Stacktrace: [1] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [2] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:85 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined] MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:86 Expression: occursin("Fail: 0", output) Evaluated: occursin("Fail: 0", "") Stacktrace: [1] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 [2] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:86 [inlined] [6] macro expansion @ 
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined]
MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:87
  Expression: occursin("Error: 0", output)
   Evaluated: occursin("Error: 0", "")
Stacktrace:
 [1] top-level scope
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [2] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [3] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined]
 [4] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [5] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:87 [inlined]
 [6] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fffe93549a0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_new_operations.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_new_operations.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"])
└ active_proj = "/tmp/jl_W4uier/Project.toml"
Mixed Sparse-Dense Operations: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16
 [4] top-level scope
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [5] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [6] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:90 [inlined]
 [7] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [8] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:90 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fff4183ee60) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_repartition.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_repartition.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"])
└ active_proj = "/tmp/jl_W4uier/Project.toml"
MPI Repartition: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16
 [4] top-level scope
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [5] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [6] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:93 [inlined]
 [7] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [8] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:93 [inlined]
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fffc02185c0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_map_rows.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_W4uier/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_map_rows.jl`,["JULIA_NUM_PRECOMPILE_TASKS=1", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "JULIA_PKG_PRECOMPILE_AUTO=0", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "CI=true", "UCX_MEMTYPE_CACHE=no", "OPENBLAS_NUM_THREADS=1", "HOME=/home/pkgeval", "JULIA_PKGEVAL=true" … "OPENBLAS_MAIN_FREE=1", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "JULIA_CPU_THREADS=1", "DISPLAY=:1", "PKGEVAL=true", "JULIA_LOAD_PATH=@:/tmp/jl_W4uier", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "PYTHON=", "LANG=C.UTF-8", "R_HOME=*"])
└ active_proj = "/tmp/jl_W4uier/Project.toml"
MPI map_rows: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
  Expression: ok == expect_success
   Evaluated: false == true
Stacktrace:
 [1] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:773 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] kwcall(::@NamedTuple{nprocs::Int64, expect_success::Bool}, ::typeof(run_mpi_test), test_file::String)
   @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:16
 [4] top-level scope
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
 [5] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [6] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:96 [inlined]
 [7] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2156 [inlined]
 [8] macro expansion
   @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:96 [inlined]
Test Summary:                   | Fail  Total     Time
LinearAlgebraMPI Tests          |   17     17  6m47.0s
  MPI Matrix Multiplication     |    1      1    18.6s
  MPI Transpose                 |    1      1    12.3s
  MPI Addition                  |    1      1    12.8s
  MPI Lazy Transpose            |    1      1  3m43.1s
  MPI Vector Multiplication     |    1      1    12.6s
  MPI Dense Matrix              |    1      1    12.8s
  MPI Sparse API Extensions     |    1      1    12.8s
  MPI Block Matrix Operations   |    1      1    12.2s
  MPI Utilities                 |    1      1    12.7s
  MPI Local Constructors        |    1      1    12.5s
  MPI Indexing                  |    1      1    12.7s
  MPI Factorization             |    3      3    12.9s
  Mixed Sparse-Dense Operations |    1      1    13.0s
  MPI Repartition               |    1      1    13.1s
  MPI map_rows                  |    1      1    13.0s
RNG of the outermost testset: Random.Xoshiro(0xb40422e7a7a4140f, 0x46f8f5282d39a706, 0x87af9058b9159218, 0x7392ca52293bc5d1, 0x58605c5aed5f4c50)
ERROR: LoadError: Some tests did not pass: 0 passed, 17 failed, 0 errored, 0 broken.
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:36
Testing failed after 210.51s
ERROR: LoadError: Package LinearAlgebraMPI errored during testing
Stacktrace:
 [1] pkgerror(msg::String)
   @ Pkg.Types /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Types.jl:68
 [2] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, julia_args::Cmd, test_args::Cmd, test_fn::Nothing, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool)
   @ Pkg.Operations /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Operations.jl:3122
 [3] test
   @ /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Operations.jl:2987 [inlined]
 [4] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, test_fn::Nothing, julia_args::Cmd, test_args::Cmd, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool, kwargs::@Kwargs{io::IOContext{IO}})
   @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:572
 [5] kwcall(::@NamedTuple{julia_args::Cmd, io::IOContext{IO}}, ::typeof(Pkg.API.test), ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec})
   @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:548
 [6] test(pkgs::Vector{PackageSpec}; io::IOContext{IO}, kwargs::@Kwargs{julia_args::Cmd})
   @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:172
 [7] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkgs::Vector{PackageSpec})
   @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:161
 [8] test(pkgs::Vector{String}; kwargs::@Kwargs{julia_args::Cmd})
   @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:160
 [9] test
   @ /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:160 [inlined]
 [10] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkg::String)
   @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:159
 [11] top-level scope
   @ /PkgEval.jl/scripts/evaluate.jl:237
 [12] include(mod::Module, _path::String)
   @ Base ./Base.jl:309
 [13] exec_options(opts::Base.JLOptions)
   @ Base ./client.jl:344
 [14] _start()
   @ Base ./client.jl:585
in expression starting at /PkgEval.jl/scripts/evaluate.jl:228
PkgEval failed after 834.03s: package has test failures
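
Editor's note (not part of the PkgEval log): all 17 failures above share one root cause. MPICH's TCP netmod cannot resolve the sandbox hostname ("gethostbyname failed, LinearAlgebraMPI-primary-8D79qcgD (errno 1)"), so MPI_Init_thread aborts in every mpiexec-launched worker before any LinearAlgebraMPI test code runs; the parent test harness then only observes the nonzero exit status (exitcode = 15). The snippet below is a minimal diagnostic sketch, not code from the package or from PkgEval: it assumes only the Sockets standard library and checks whether the current hostname resolves, which is the same lookup that fails inside MPICH.

    # Hypothetical diagnostic, to be run inside the same sandbox:
    using Sockets

    host = gethostname()
    try
        ip = getaddrinfo(host)   # the kind of lookup MPICH's GetSockInterfaceAddr depends on
        println("$host resolves to $ip; MPI_Init_thread should get past business-card setup")
    catch err
        println("$host does not resolve: $err")
        println("a common workaround is mapping the hostname to 127.0.0.1 in /etc/hosts,")
        println("or pointing MPICH at an interface that does resolve, before invoking mpiexec")
    end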