Package evaluation to test LinearAlgebraMPI on Julia 1.11.8 (29b3528cce*) started at 2026-01-20T06:46:50.328

################################################################################
# Set-up
# Installing PkgEval dependencies (TestEnv)...

  Activating project at `~/.julia/environments/v1.11`

Set-up completed after 8.94s

################################################################################
# Installation
# Installing LinearAlgebraMPI...

   Resolving package versions...
    Updating `~/.julia/environments/v1.11/Project.toml`
  [5bdd2be4] + LinearAlgebraMPI v0.1.9
    Updating `~/.julia/environments/v1.11/Manifest.toml`
  [79e6a3ab] + Adapt v4.4.0
  [a9b6321e] + Atomix v1.1.2
  [8f478455] + Blake3Hash v0.3.0
  [ffbed154] + DocStringExtensions v0.9.5
  [692b3bcd] + JLLWrappers v1.7.1
  [63c18a36] + KernelAbstractions v0.9.39
  [5bdd2be4] + LinearAlgebraMPI v0.1.9
  [da04e1cc] + MPI v0.20.23
  [3da0fdf6] + MPIPreferences v0.1.11
  [55d2b088] + MUMPS v1.6.0
  [1914dd2f] + MacroTools v0.5.16
  [eebad327] + PkgVersion v0.3.3
⌅ [aea7be01] + PrecompileTools v1.2.1
  [21216c6a] + Preferences v1.5.1
  [ae029012] + Requires v1.3.1
  [fdea26ae] + SIMD v3.7.2
  [90137ffa] + StaticArrays v1.9.16
  [1e83bf80] + StaticArraysCore v1.4.4
  [013be700] + UnsafeAtomics v0.3.0
  [6e34b625] + Bzip2_jll v1.0.9+0
  [e33a78d0] + Hwloc_jll v2.12.2+0
  [94ce4f54] + Libiconv_jll v1.18.0+0
  [d00139f3] + METIS_jll v5.1.3+0
  [7cb0a576] + MPICH_jll v4.3.2+0
  [f1f71cc9] + MPItrampoline_jll v5.5.4+0
  [ca64183c] + MUMPS_jll v5.8.2+0
  [9237b28f] + MicrosoftMPI_jll v10.1.4+3
  [656ef2d0] + OpenBLAS32_jll v0.3.29+0
  [fe0851c0] + OpenMPI_jll v5.0.9+0
  [b247a4be] + PARMETIS_jll v4.0.6+2
  [aabda75e] + SCALAPACK32_jll v2.2.2+0
  [a8d0f55d] + SCOTCH_jll v7.0.7+0
⌅ [02c8fc9c] + XML2_jll v2.13.9+0
  [ffd25f8a] + XZ_jll v5.8.2+0
  [a65dc6b1] + Xorg_libpciaccess_jll v0.18.1+0
  [0dad84c5] + ArgTools v1.1.2
  [56f22d72] + Artifacts v1.11.0
  [2a0f44e3] + Base64 v1.11.0
  [ade2ca70] + Dates v1.11.0
  [8ba89e20] + Distributed v1.11.0
  [f43a241f] + Downloads v1.6.0
  [7b1f6079] + FileWatching v1.11.0
  [b77e0a4c] + InteractiveUtils v1.11.0
  [4af54fe1] + LazyArtifacts v1.11.0
  [b27032c2] + LibCURL v0.6.4
  [76f85450] + LibGit2 v1.11.0
  [8f399da3] + Libdl v1.11.0
  [37e2e46d] + LinearAlgebra v1.11.0
  [56ddb016] + Logging v1.11.0
  [d6f4376e] + Markdown v1.11.0
  [ca575930] + NetworkOptions v1.2.0
  [44cfe95a] + Pkg v1.11.0
  [de0858da] + Printf v1.11.0
  [9a3f8284] + Random v1.11.0
  [ea8e919c] + SHA v0.7.0
  [9e88b42a] + Serialization v1.11.0
  [6462fe0b] + Sockets v1.11.0
  [2f01184e] + SparseArrays v1.11.0
  [fa267f1f] + TOML v1.0.3
  [a4e569a6] + Tar v1.10.0
  [cf7118a7] + UUIDs v1.11.0
  [4ec0a83e] + Unicode v1.11.0
  [e66e0078] + CompilerSupportLibraries_jll v1.1.1+0
  [deac9b47] + LibCURL_jll v8.6.0+0
  [e37daf67] + LibGit2_jll v1.7.2+0
  [29816b5a] + LibSSH2_jll v1.11.0+1
  [c8ffd9c3] + MbedTLS_jll v2.28.6+0
  [14a3606d] + MozillaCACerts_jll v2023.12.12
  [4536629a] + OpenBLAS_jll v0.3.27+1
  [bea87d4a] + SuiteSparse_jll v7.7.0+0
  [83775a58] + Zlib_jll v1.2.13+1
  [8e850b90] + libblastrampoline_jll v5.11.0+0
  [8e850ede] + nghttp2_jll v1.59.0+0
  [3f19e933] + p7zip_jll v17.4.0+2
        Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading. To see why use `status --outdated -m`

Installation completed after 11.1s

################################################################################
# Precompilation
# Precompiling PkgEval dependencies...

Precompiling package dependencies...
Precompiling project...
  5620.2 ms  ✓ NCCL_jll
  ✗ LinearAlgebraMPI
  ✗ Atomix → AtomixCUDAExt
  ✗ MPI → CUDAExt
  ✗ NCCL
  ✗ LinearAlgebraMPI → LinearAlgebraMPICUDAExt
  1 dependency successfully precompiled in 207 seconds. 128 already precompiled.

ERROR: LoadError: The following 4 direct dependencies failed to precompile:

LinearAlgebraMPI

Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.11/LinearAlgebraMPI/jl_RN86y7".
ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB
The function `convert` exists, but no method is defined for this combination of argument types.

Closest candidates are:
  MUMPS.MUMPS_JOB(::Integer)
   @ MUMPS Enums.jl:211
  convert(::Type{T}, !Matched::T) where T
   @ Base Base.jl:126

Stacktrace:
  [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32)
    @ Base ./Base.jl:52
  [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328
  [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419
  [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64})
    @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484
  [5] macro expansion
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined]
  [6] macro expansion
    @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined]
  [7] macro expansion
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined]
  [8] macro expansion
    @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined]
  [9] top-level scope
    @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308
 [10] include
    @ ./Base.jl:562 [inlined]
 [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)
    @ Base ./loading.jl:2924
 [12] top-level scope
    @ stdin:6
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1
in expression starting at stdin:6

NCCL

Failed to precompile NCCL [3fe64909-d7a1-4096-9b7d-7a0f12cf0f6b] to "/home/pkgeval/.julia/compiled/v1.11/NCCL/jl_a6DoeP".
julia: /source/src/gc.c:1315: jl_gc_pool_alloc_inner: Assertion `__extension__ ({ __auto_type __atomic_load_ptr = (&ptls->gc_state); __typeof__ (*__atomic_load_ptr) __atomic_load_tmp; __atomic_load (__atomic_load_ptr, &__atomic_load_tmp, (memory_order_relaxed)); __atomic_load_tmp; }) == 0' failed.
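The MethodError above is the root cause of every LinearAlgebraMPI precompile failure in this log: mumps_factorization.jl:328 assigns a raw Int32 to a field of MUMPS.Mumps whose declared type is the enum MUMPS.MUMPS_JOB, and `setproperty!` has no `convert` method for that combination (`@enum` types only define a constructor from Integer, as the "Closest candidates" hint shows). A minimal, self-contained sketch of the failure and two possible fixes, using stand-in names rather than the real MUMPS.jl definitions (the NCCL crash report continues below):

    # Stand-ins for the enum and struct named in the stacktrace above.
    @enum MUMPS_JOB_DEMO DEMO_ANALYSIS=1 DEMO_FACTORIZE=2

    mutable struct MumpsDemo
        job::MUMPS_JOB_DEMO
    end

    m = MumpsDemo(DEMO_ANALYSIS)

    # Reproduces frame [1]: setproperty! calls convert(MUMPS_JOB_DEMO, Int32(2)),
    # and @enum defines MUMPS_JOB_DEMO(::Integer) but no convert method.
    #   m.job = Int32(2)            # MethodError, as in the log

    # Fix 1: construct the enum explicitly at the assignment site.
    m.job = MUMPS_JOB_DEMO(Int32(2))

    # Fix 2: supply the missing convert method; integer assignment then works.
    Base.convert(::Type{MUMPS_JOB_DEMO}, x::Integer) = MUMPS_JOB_DEMO(x)
    m.job = Int32(2)

Either change would belong in LinearAlgebraMPI's mumps_factorization.jl or in MUMPS.jl itself, not in the test environment.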
[282] signal 6 (-6): Aborted in expression starting at /home/pkgeval/.julia/packages/NCCL/wRgZg/src/NCCL.jl:3 unknown function (ip: 0x7719909c7ebc) gsignal at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) abort at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) unknown function (ip: 0x771990963394) __assert_fail at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) jl_gc_pool_alloc_inner at /source/src/gc.c:1315 ijl_alloc_string at /source/src/array.c:314 [inlined] ijl_alloc_string at /source/src/array.c:297 ijl_pchar_to_string at /source/src/array.c:330 ijl_vexceptionf at /source/src/rtutils.c:64 ijl_errorf at /source/src/rtutils.c:77 ijl_load_dynamic_library at /source/src/dlload.c:388 jl_get_library_ at /source/src/runtime_ccall.cpp:45 [inlined] jl_get_library_ at /source/src/runtime_ccall.cpp:29 ijl_lazy_load_and_lookup at /source/src/runtime_ccall.cpp:73 macro expansion at /home/pkgeval/.julia/packages/GPUToolbox/JLBB1/src/ccalls.jl:143 [inlined] macro expansion at /home/pkgeval/.julia/packages/CUDA/FJf6p/lib/cudadrv/libcuda.jl:3853 [inlined] #174 at /home/pkgeval/.julia/packages/GPUToolbox/JLBB1/src/ccalls.jl:34 unknown function (ip: 0x771987f24f2f) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x771987f24d6c) unknown function (ip: 0x771987f24cf0) check at /home/pkgeval/.julia/packages/CUDA/FJf6p/lib/cudadrv/libcuda.jl:35 unknown function (ip: 0x771987f24c42) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x771987f24bbc) unknown function (ip: 0x771987f24b3b) cuDriverGetVersion at /home/pkgeval/.julia/packages/GPUToolbox/JLBB1/src/ccalls.jl:33 unknown function (ip: 0x771987f24a72) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x771987f249cc) unknown function (ip: 0x771987f24960) set_driver_version at /home/pkgeval/.julia/packages/CUDA/FJf6p/lib/cudadrv/version.jl:8 unknown function (ip: 0x771987f248ba) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x771987f2439c) unknown function (ip: 0x771987f2408c) __init__ at /home/pkgeval/.julia/packages/CUDA/FJf6p/src/initialization.jl:62 unknown function (ip: 0x771987f23fe6) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] jl_module_run_initializer at /source/src/toplevel.c:76 run_module_init at ./loading.jl:1378 register_restored_modules at ./loading.jl:1366 #_include_from_serialized#1082 at ./loading.jl:1254 _include_from_serialized at ./loading.jl:1210 [inlined] _include_from_serialized at ./loading.jl:1210 [inlined] #_require_search_from_serialized#1105 at ./loading.jl:2057 _require_search_from_serialized at ./loading.jl:1969 jfptr__require_search_from_serialized_45031.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 _require at ./loading.jl:2527 __require_prelocked at ./loading.jl:2388 jfptr___require_prelocked_71351.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] jl_f__call_in_world at /source/src/builtins.c:894 #invoke_in_world#3 at ./essentials.jl:1089 [inlined] invoke_in_world at ./essentials.jl:1086 [inlined] _require_prelocked at ./loading.jl:2375 macro expansion at ./loading.jl:2314 [inlined] macro 
expansion at ./lock.jl:273 [inlined] __require at ./loading.jl:2271 jfptr___require_71282.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] jl_f__call_in_world at /source/src/builtins.c:894 #invoke_in_world#3 at ./essentials.jl:1089 [inlined] invoke_in_world at ./essentials.jl:1086 [inlined] require at ./loading.jl:2260 jfptr_require_71279.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] call_require at /source/src/toplevel.c:486 [inlined] eval_import_path at /source/src/toplevel.c:523 jl_toplevel_eval_flex at /source/src/toplevel.c:759 jl_eval_module_expr at /source/src/toplevel.c:215 [inlined] jl_toplevel_eval_flex at /source/src/toplevel.c:743 jl_toplevel_eval_flex at /source/src/toplevel.c:886 ijl_toplevel_eval_in at /source/src/toplevel.c:994 eval at ./boot.jl:430 [inlined] include_string at ./loading.jl:2777 _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 _include at ./loading.jl:2837 include at ./Base.jl:562 [inlined] include_package_for_output at ./loading.jl:2924 jfptr_include_package_for_output_72244.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] do_call at /source/src/interpreter.c:126 eval_value at /source/src/interpreter.c:223 eval_stmt_value at /source/src/interpreter.c:174 [inlined] eval_body at /source/src/interpreter.c:670 jl_interpret_toplevel_thunk at /source/src/interpreter.c:824 jl_toplevel_eval_flex at /source/src/toplevel.c:943 jl_toplevel_eval_flex at /source/src/toplevel.c:886 ijl_toplevel_eval_in at /source/src/toplevel.c:994 eval at ./boot.jl:430 [inlined] include_string at ./loading.jl:2777 include_string at ./loading.jl:2787 [inlined] exec_options at ./client.jl:314 _start at ./client.jl:524 jfptr__start_73686.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] true_main at /source/src/jlapi.c:900 jl_repl_entrypoint at /source/src/jlapi.c:1059 main at /source/cli/loader_exe.c:58 unknown function (ip: 0x771990964249) __libc_start_main at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) unknown function (ip: 0x4010b8) Allocations: 4849068 (Pool: 4848892; Big: 176); GC: 9 LinearAlgebraMPICUDAExt Failed to precompile LinearAlgebraMPICUDAExt [386e412e-22e3-592f-a311-c0c588906e07] to "/home/pkgeval/.julia/compiled/v1.11/LinearAlgebraMPICUDAExt/jl_NdCABH". ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB The function `convert` exists, but no method is defined for this combination of argument types. 
Closest candidates are: MUMPS.MUMPS_JOB(::Integer) @ MUMPS Enums.jl:211 convert(::Type{T}, !Matched::T) where T @ Base Base.jl:126 Stacktrace: [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32) @ Base ./Base.jl:52 [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328 [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419 [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined] [6] macro expansion @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined] [8] macro expansion @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined] [9] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308 [10] include @ ./Base.jl:562 [inlined] [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:2924 [12] top-level scope @ stdin:6 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1 in expression starting at stdin:6 ERROR: LoadError: Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.11/LinearAlgebraMPI/jl_edwZbX". 
Stacktrace: [1] error(s::String) @ Base ./error.jl:35 [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3217 [3] (::Base.var"#1110#1111"{Base.PkgId})() @ Base ./loading.jl:2579 [4] mkpidlock(f::Base.var"#1110#1111"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.11/FileWatching/src/pidfile.jl:95 [5] #mkpidlock#6 @ /opt/julia/share/julia/stdlib/v1.11/FileWatching/src/pidfile.jl:90 [inlined] [6] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.11/FileWatching/src/pidfile.jl:116 [7] #invokelatest#2 @ ./essentials.jl:1057 [inlined] [8] invokelatest @ ./essentials.jl:1052 [inlined] [9] maybe_cachefile_lock(f::Base.var"#1110#1111"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3741 [10] maybe_cachefile_lock @ ./loading.jl:3738 [inlined] [11] _require(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2565 [12] __require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2388 [13] #invoke_in_world#3 @ ./essentials.jl:1089 [inlined] [14] invoke_in_world @ ./essentials.jl:1086 [inlined] [15] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2375 [16] macro expansion @ ./loading.jl:2314 [inlined] [17] macro expansion @ ./lock.jl:273 [inlined] [18] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2271 [19] #invoke_in_world#3 @ ./essentials.jl:1089 [inlined] [20] invoke_in_world @ ./essentials.jl:1086 [inlined] [21] require(into::Module, mod::Symbol) @ Base ./loading.jl:2260 [22] include @ ./Base.jl:562 [inlined] [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:2924 [24] top-level scope @ stdin:6 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/ext/LinearAlgebraMPICUDAExt.jl:1 in expression starting at stdin:6 CUDAExt Failed to precompile CUDAExt [11b7e2e0-d079-575b-885e-0ab22ef3252c] to "/home/pkgeval/.julia/compiled/v1.11/CUDAExt/jl_pjw1At". julia: /source/src/gc.c:1315: jl_gc_pool_alloc_inner: Assertion `__extension__ ({ __auto_type __atomic_load_ptr = (&ptls->gc_state); __typeof__ (*__atomic_load_ptr) __atomic_load_tmp; __atomic_load (__atomic_load_ptr, &__atomic_load_tmp, (memory_order_relaxed)); __atomic_load_tmp; }) == 0' failed. 
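The two signal-6 aborts in this log (the NCCL one above and the CUDAExt one whose backtrace follows) are a different failure mode: the frames show ijl_load_dynamic_library raising an error while CUDA.jl's __init__ queries cuDriverGetVersion, presumably because the PkgEval container has no CUDA driver, and formatting that error then trips a GC-state assertion in this assertion-enabled Julia build. On a regular build the same situation surfaces as an ordinary, catchable load error; a rough sketch, assuming libcuda is simply absent:

    using Libdl

    # Stand-in for the lazy ccall into the CUDA driver behind cuDriverGetVersion.
    handle = try
        Libdl.dlopen("libcuda")
    catch err
        @info "CUDA driver library is unavailable; CUDA-dependent extensions cannot load" err
        nothing
    end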
[269] signal 6 (-6): Aborted in expression starting at /home/pkgeval/.julia/packages/MPI/hNJm0/ext/CUDAExt.jl:4 unknown function (ip: 0x7c25bd324ebc) gsignal at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) abort at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) unknown function (ip: 0x7c25bd2c0394) __assert_fail at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) jl_gc_pool_alloc_inner at /source/src/gc.c:1315 ijl_alloc_string at /source/src/array.c:314 [inlined] ijl_alloc_string at /source/src/array.c:297 ijl_pchar_to_string at /source/src/array.c:330 ijl_vexceptionf at /source/src/rtutils.c:64 ijl_errorf at /source/src/rtutils.c:77 ijl_load_dynamic_library at /source/src/dlload.c:388 jl_get_library_ at /source/src/runtime_ccall.cpp:45 [inlined] jl_get_library_ at /source/src/runtime_ccall.cpp:29 ijl_lazy_load_and_lookup at /source/src/runtime_ccall.cpp:73 macro expansion at /home/pkgeval/.julia/packages/GPUToolbox/JLBB1/src/ccalls.jl:143 [inlined] macro expansion at /home/pkgeval/.julia/packages/CUDA/FJf6p/lib/cudadrv/libcuda.jl:3853 [inlined] #174 at /home/pkgeval/.julia/packages/GPUToolbox/JLBB1/src/ccalls.jl:34 unknown function (ip: 0x7c25b472c3ef) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x7c25b472c22c) unknown function (ip: 0x7c25b472c1b0) check at /home/pkgeval/.julia/packages/CUDA/FJf6p/lib/cudadrv/libcuda.jl:35 unknown function (ip: 0x7c25b472c102) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x7c25b472c07c) unknown function (ip: 0x7c25b472bffb) cuDriverGetVersion at /home/pkgeval/.julia/packages/GPUToolbox/JLBB1/src/ccalls.jl:33 unknown function (ip: 0x7c25b472bf32) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x7c25b472be8c) unknown function (ip: 0x7c25b472be20) set_driver_version at /home/pkgeval/.julia/packages/CUDA/FJf6p/lib/cudadrv/version.jl:8 unknown function (ip: 0x7c25b472bd7a) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_invoke at /source/src/gf.c:2958 unknown function (ip: 0x7c25b472b85c) unknown function (ip: 0x7c25b472b54c) __init__ at /home/pkgeval/.julia/packages/CUDA/FJf6p/src/initialization.jl:62 unknown function (ip: 0x7c25b472b4a6) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] jl_module_run_initializer at /source/src/toplevel.c:76 run_module_init at ./loading.jl:1378 register_restored_modules at ./loading.jl:1366 #_include_from_serialized#1082 at ./loading.jl:1254 _include_from_serialized at ./loading.jl:1210 [inlined] _include_from_serialized at ./loading.jl:1210 [inlined] #_require_search_from_serialized#1105 at ./loading.jl:2057 _require_search_from_serialized at ./loading.jl:1969 jfptr__require_search_from_serialized_45031.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 _require at ./loading.jl:2527 __require_prelocked at ./loading.jl:2388 jfptr___require_prelocked_71351.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] jl_f__call_in_world at /source/src/builtins.c:894 #invoke_in_world#3 at ./essentials.jl:1089 [inlined] invoke_in_world at ./essentials.jl:1086 [inlined] _require_prelocked at ./loading.jl:2375 macro expansion at ./loading.jl:2314 [inlined] macro 
expansion at ./lock.jl:273 [inlined] __require at ./loading.jl:2271 jfptr___require_71282.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] jl_f__call_in_world at /source/src/builtins.c:894 #invoke_in_world#3 at ./essentials.jl:1089 [inlined] invoke_in_world at ./essentials.jl:1086 [inlined] require at ./loading.jl:2260 jfptr_require_71279.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] call_require at /source/src/toplevel.c:486 [inlined] eval_import_path at /source/src/toplevel.c:523 jl_toplevel_eval_flex at /source/src/toplevel.c:812 eval_body at /source/src/interpreter.c:632 jl_interpret_toplevel_thunk at /source/src/interpreter.c:824 jl_toplevel_eval_flex at /source/src/toplevel.c:943 jl_eval_module_expr at /source/src/toplevel.c:215 [inlined] jl_toplevel_eval_flex at /source/src/toplevel.c:743 jl_toplevel_eval_flex at /source/src/toplevel.c:886 ijl_toplevel_eval_in at /source/src/toplevel.c:994 eval at ./boot.jl:430 [inlined] include_string at ./loading.jl:2777 _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 _include at ./loading.jl:2837 include at ./Base.jl:562 [inlined] include_package_for_output at ./loading.jl:2924 jfptr_include_package_for_output_72244.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] do_call at /source/src/interpreter.c:126 eval_value at /source/src/interpreter.c:223 eval_stmt_value at /source/src/interpreter.c:174 [inlined] eval_body at /source/src/interpreter.c:670 jl_interpret_toplevel_thunk at /source/src/interpreter.c:824 jl_toplevel_eval_flex at /source/src/toplevel.c:943 jl_toplevel_eval_flex at /source/src/toplevel.c:886 ijl_toplevel_eval_in at /source/src/toplevel.c:994 eval at ./boot.jl:430 [inlined] include_string at ./loading.jl:2777 include_string at ./loading.jl:2787 [inlined] exec_options at ./client.jl:314 _start at ./client.jl:524 jfptr__start_73686.1 at /opt/julia/lib/julia/sys.so (unknown line) _jl_invoke at /source/src/gf.c:2951 [inlined] ijl_apply_generic at /source/src/gf.c:3128 jl_apply at /source/src/julia.h:2157 [inlined] true_main at /source/src/jlapi.c:900 jl_repl_entrypoint at /source/src/jlapi.c:1059 main at /source/cli/loader_exe.c:58 unknown function (ip: 0x7c25bd2c1249) __libc_start_main at /lib/x86_64-linux-gnu/libc.so.6 (unknown line) unknown function (ip: 0x4010b8) Allocations: 6309947 (Pool: 6309771; Big: 176); GC: 10 in expression starting at /PkgEval.jl/scripts/precompile.jl:34 Precompilation failed after 245.75s ################################################################################ # Testing # Testing LinearAlgebraMPI Status `/tmp/jl_Hh34vI/Project.toml` [79e6a3ab] Adapt v4.4.0 [8f478455] Blake3Hash v0.3.0 [052768ef] CUDA v5.9.6 [63c18a36] KernelAbstractions v0.9.39 [5bdd2be4] LinearAlgebraMPI v0.1.9 [da04e1cc] MPI v0.20.23 [3da0fdf6] MPIPreferences v0.1.11 [55d2b088] MUMPS v1.6.0 [3fe64909] NCCL v0.1.2 ⌅ [aea7be01] PrecompileTools v1.2.1 [90137ffa] StaticArrays v1.9.16 [4889d778] CUDSS_jll v0.7.1+0 [37e2e46d] LinearAlgebra v1.11.0 [9a3f8284] Random v1.11.0 [2f01184e] SparseArrays v1.11.0 [8dfed614] Test v1.11.0 Status `/tmp/jl_Hh34vI/Manifest.toml` [621f4979] AbstractFFTs 
v1.5.0 [79e6a3ab] Adapt v4.4.0 [a9b6321e] Atomix v1.1.2 [ab4f0b2a] BFloat16s v0.6.1 [8f478455] Blake3Hash v0.3.0 [fa961155] CEnum v0.5.0 [052768ef] CUDA v5.9.6 [1af6417a] CUDA_Runtime_Discovery v1.0.0 [34da2185] Compat v4.18.1 [a8cc5b0e] Crayons v4.1.1 [9a962f9c] DataAPI v1.16.0 [a93c6f00] DataFrames v1.8.1 [864edb3b] DataStructures v0.19.3 [e2d170a0] DataValueInterfaces v1.0.0 [ffbed154] DocStringExtensions v0.9.5 [e2ba6199] ExprTools v0.1.10 [0c68f7d7] GPUArrays v11.3.4 [46192b85] GPUArraysCore v0.2.0 [61eb1bfa] GPUCompiler v1.8.0 [096a3bc2] GPUToolbox v1.0.0 [076d061b] HashArrayMappedTries v0.2.0 [842dd82b] InlineStrings v1.4.5 [41ab1584] InvertedIndices v1.3.1 [82899510] IteratorInterfaceExtensions v1.0.0 [692b3bcd] JLLWrappers v1.7.1 [63c18a36] KernelAbstractions v0.9.39 [929cbde3] LLVM v9.4.4 [8b046642] LLVMLoopInfo v1.0.0 [b964fa9f] LaTeXStrings v1.4.0 [5bdd2be4] LinearAlgebraMPI v0.1.9 [da04e1cc] MPI v0.20.23 [3da0fdf6] MPIPreferences v0.1.11 [55d2b088] MUMPS v1.6.0 [1914dd2f] MacroTools v0.5.16 [e1d29d7a] Missings v1.2.0 [3fe64909] NCCL v0.1.2 [5da4648a] NVTX v1.0.3 [bac558e1] OrderedCollections v1.8.1 [eebad327] PkgVersion v0.3.3 [2dfb63ee] PooledArrays v1.4.3 ⌅ [aea7be01] PrecompileTools v1.2.1 [21216c6a] Preferences v1.5.1 [08abe8d2] PrettyTables v3.1.2 [74087812] Random123 v1.7.1 [e6cf234a] RandomNumbers v1.6.0 [189a3867] Reexport v1.2.2 [ae029012] Requires v1.3.1 [fdea26ae] SIMD v3.7.2 [7e506255] ScopedValues v1.5.0 [6c6a2e73] Scratch v1.3.0 [91c51154] SentinelArrays v1.4.9 [a2af1166] SortingAlgorithms v1.2.2 [90137ffa] StaticArrays v1.9.16 [1e83bf80] StaticArraysCore v1.4.4 [10745b16] Statistics v1.11.1 [892a3eda] StringManipulation v0.4.2 [3783bdb8] TableTraits v1.0.1 [bd369af6] Tables v1.12.1 [e689c965] Tracy v0.1.6 [013be700] UnsafeAtomics v0.3.0 [6e34b625] Bzip2_jll v1.0.9+0 [d1e2174e] CUDA_Compiler_jll v0.4.1+1 [4ee394cb] CUDA_Driver_jll v13.1.0+2 ⌅ [76a88914] CUDA_Runtime_jll v0.19.2+0 [4889d778] CUDSS_jll v0.7.1+0 [e33a78d0] Hwloc_jll v2.12.2+0 [9c1d0b0a] JuliaNVTXCallbacks_jll v0.2.1+0 [dad2f222] LLVMExtra_jll v0.0.38+0 [ad6e5548] LibTracyClient_jll v0.13.1+0 [94ce4f54] Libiconv_jll v1.18.0+0 [d00139f3] METIS_jll v5.1.3+0 [7cb0a576] MPICH_jll v4.3.2+0 [f1f71cc9] MPItrampoline_jll v5.5.4+0 [ca64183c] MUMPS_jll v5.8.2+0 [9237b28f] MicrosoftMPI_jll v10.1.4+3 [4d6d38e4] NCCL_jll v2.28.3+0 [e98f9f5b] NVTX_jll v3.2.2+0 [656ef2d0] OpenBLAS32_jll v0.3.29+0 [fe0851c0] OpenMPI_jll v5.0.9+0 [b247a4be] PARMETIS_jll v4.0.6+2 [aabda75e] SCALAPACK32_jll v2.2.2+0 [a8d0f55d] SCOTCH_jll v7.0.7+0 ⌅ [02c8fc9c] XML2_jll v2.13.9+0 [ffd25f8a] XZ_jll v5.8.2+0 [a65dc6b1] Xorg_libpciaccess_jll v0.18.1+0 [1e29f10c] demumble_jll v1.3.0+0 [0dad84c5] ArgTools v1.1.2 [56f22d72] Artifacts v1.11.0 [2a0f44e3] Base64 v1.11.0 [ade2ca70] Dates v1.11.0 [8ba89e20] Distributed v1.11.0 [f43a241f] Downloads v1.6.0 [7b1f6079] FileWatching v1.11.0 [9fa8497b] Future v1.11.0 [b77e0a4c] InteractiveUtils v1.11.0 [4af54fe1] LazyArtifacts v1.11.0 [b27032c2] LibCURL v0.6.4 [76f85450] LibGit2 v1.11.0 [8f399da3] Libdl v1.11.0 [37e2e46d] LinearAlgebra v1.11.0 [56ddb016] Logging v1.11.0 [d6f4376e] Markdown v1.11.0 [ca575930] NetworkOptions v1.2.0 [44cfe95a] Pkg v1.11.0 [de0858da] Printf v1.11.0 [3fa0cd96] REPL v1.11.0 [9a3f8284] Random v1.11.0 [ea8e919c] SHA v0.7.0 [9e88b42a] Serialization v1.11.0 [6462fe0b] Sockets v1.11.0 [2f01184e] SparseArrays v1.11.0 [f489334b] StyledStrings v1.11.0 [fa267f1f] TOML v1.0.3 [a4e569a6] Tar v1.10.0 [8dfed614] Test v1.11.0 [cf7118a7] UUIDs v1.11.0 [4ec0a83e] Unicode v1.11.0 [e66e0078] 
CompilerSupportLibraries_jll v1.1.1+0 [deac9b47] LibCURL_jll v8.6.0+0 [e37daf67] LibGit2_jll v1.7.2+0 [29816b5a] LibSSH2_jll v1.11.0+1 [c8ffd9c3] MbedTLS_jll v2.28.6+0 [14a3606d] MozillaCACerts_jll v2023.12.12 [4536629a] OpenBLAS_jll v0.3.27+1 [bea87d4a] SuiteSparse_jll v7.7.0+0 [83775a58] Zlib_jll v1.2.13+1 [8e850b90] libblastrampoline_jll v5.11.0+0 [8e850ede] nghttp2_jll v1.59.0+0 [3f19e933] p7zip_jll v17.4.0+2 Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading. Testing Running tests... Precompiling MPI... 1306.0 ms ✓ MPIPreferences 3554.5 ms ✓ MPItrampoline_jll 3349.5 ms ✓ OpenMPI_jll 3463.4 ms ✓ MPICH_jll 10289.2 ms ✓ MPI 5 dependencies successfully precompiled in 23 seconds. 38 already precompiled. Precompiling LinearAlgebraMPI... 2329.4 ms ✓ Blake3Hash 6225.5 ms ✓ SCALAPACK32_jll 3536.0 ms ✓ PARMETIS_jll 3573.8 ms ✓ MUMPS_jll 5347.9 ms ✓ MUMPS Info Given LinearAlgebraMPI was explicitly requested, output will be shown live  ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB The function `convert` exists, but no method is defined for this combination of argument types.  Closest candidates are:  MUMPS.MUMPS_JOB(::Integer)  @ MUMPS Enums.jl:211  convert(::Type{T}, !Matched::T) where T  @ Base Base.jl:126  Stacktrace:  [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32)  @ Base ./Base.jl:52  [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328  [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419  [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64})  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484  [5] macro expansion  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined]  [6] macro expansion  @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined]  [7] macro expansion  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined]  [8] macro expansion  @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined]  [9] top-level scope  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308  [10] include  @ ./Base.jl:562 [inlined]  [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:2924  [12] top-level scope  @ stdin:6 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1 in expression starting at stdin:6 ✗ LinearAlgebraMPI 5 dependencies successfully precompiled in 75 seconds. 63 already precompiled. ┌ Warning: Precompile step hit an error; tests may still proceed │ err = │ The following 1 direct dependency failed to precompile: │ │ LinearAlgebraMPI │ │ Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.11/LinearAlgebraMPI/jl_uuXCAB". 
│ ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB │ The function `convert` exists, but no method is defined for this combination of argument types. │ │ Closest candidates are: │ MUMPS.MUMPS_JOB(::Integer) │ @ MUMPS Enums.jl:211 │ convert(::Type{T}, !Matched::T) where T │ @ Base Base.jl:126 │ │ Stacktrace: │ [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32) │ @ Base ./Base.jl:52 │ [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) │ @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328 │ [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) │ @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419 │ [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}) │ @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484 │ [5] macro expansion │ @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined] │ [6] macro expansion │ @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined] │ [7] macro expansion │ @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined] │ [8] macro expansion │ @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined] │ [9] top-level scope │ @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308 │ [10] include │ @ ./Base.jl:562 [inlined] │ [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) │ @ Base ./loading.jl:2924 │ [12] top-level scope │ @ stdin:6 │ in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1 │ in expression starting at stdin: └ @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:12 Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffde11dc450) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_matrix_multiplication.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_matrix_multiplication.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", 
"JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Hh34vI/Project.toml" MPI Matrix Multiplication: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:38 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:38 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fff06844780) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_transpose.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_transpose.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", 
"JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Hh34vI/Project.toml" MPI Transpose: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:41 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:41 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe2cff8d20) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_addition.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_addition.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Hh34vI/Project.toml" MPI Addition: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, 
expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:44 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:44 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 PrecompilingPrecompiling LinearAlgebraMPI... LinearAlgebraMPI... LinearAlgebraMPI Being precompiled by another process (pid: 563, pidfile: /home/pkgeval/.julia/compiled/v1.11/LinearAlgebraMPI/t5NwP_eJM3J.ji.pidfile) Info Given LinearAlgebraMPI was explicitly requested, output will be shown live  ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB The function `convert` exists, but no method is defined for this combination of argument types.  Closest candidates are:  MUMPS.MUMPS_JOB(::Integer)  @ MUMPS Enums.jl:211  convert(::Type{T}, !Matched::T) where T  @ Base Base.jl:126  Stacktrace:  [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32)  @ Base ./Base.jl:52  [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328  [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419  [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64})  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484  [5] macro expansion  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined]  [6] macro expansion  @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined]  [7] macro expansion  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined]  [8] macro expansion  @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined]  [9] top-level scope  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308  [10] include  @ ./Base.jl:562 [inlined]  [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:2924  [12] top-level scope  @ stdin:6 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1 in expression starting at stdin:6 ✗ LinearAlgebraMPI 0 dependencies successfully precompiled in 52 seconds. 68 already precompiled. ERROR: LoadError: The following 1 direct dependency failed to precompile: LinearAlgebraMPI Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.11/LinearAlgebraMPI/jl_84S5Kx". ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB The function `convert` exists, but no method is defined for this combination of argument types. 
Closest candidates are: MUMPS.MUMPS_JOB(::Integer) @ MUMPS Enums.jl:211 convert(::Type{T}, !Matched::T) where T @ Base Base.jl:126 Stacktrace: [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32) @ Base ./Base.jl:52 [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328 [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419 [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined] [6] macro expansion @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined] [8] macro expansion @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined] [9] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308 [10] include @ ./Base.jl:562 [inlined] [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:2924 [12] top-level scope @ stdin:6 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1 in expression starting at stdin: in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl:17 Info Given LinearAlgebraMPI was explicitly requested, output will be shown live  ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB The function `convert` exists, but no method is defined for this combination of argument types.  
Closest candidates are:  MUMPS.MUMPS_JOB(::Integer)  @ MUMPS Enums.jl:211  convert(::Type{T}, !Matched::T) where T  @ Base Base.jl:126  Stacktrace:  [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32)  @ Base ./Base.jl:52  [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328  [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool)  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419  [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64})  @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484  [5] macro expansion  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined]  [6] macro expansion  @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined]  [7] macro expansion  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined]  [8] macro expansion  @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined]  [9] top-level scope  @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308  [10] include  @ ./Base.jl:562 [inlined]  [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)  @ Base ./loading.jl:2924  [12] top-level scope  @ stdin:6 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1 in expression starting at stdin:6 ✗ LinearAlgebraMPI 0 dependencies successfully precompiled in 162 seconds. 68 already precompiled. ERROR: LoadError: The following 1 direct dependency failed to precompile: LinearAlgebraMPI Failed to precompile LinearAlgebraMPI [5bdd2be4-ae34-42ef-8b36-f4c85d48f377] to "/home/pkgeval/.julia/compiled/v1.11/LinearAlgebraMPI/jl_X2jiTs". ERROR: LoadError: MethodError: Cannot `convert` an object of type Int32 to an object of type MUMPS.MUMPS_JOB The function `convert` exists, but no method is defined for this combination of argument types. 
Closest candidates are: MUMPS.MUMPS_JOB(::Integer) @ MUMPS Enums.jl:211 convert(::Type{T}, !Matched::T) where T @ Base Base.jl:126 Stacktrace: [1] setproperty!(x::MUMPS.Mumps{Float64, Float64}, f::Symbol, v::Int32) @ Base ./Base.jl:52 [2] _get_or_create_analysis_plan(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:328 [3] _create_mumps_factorization(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}, symmetric::Bool) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:419 [4] ldlt(A::LinearAlgebraMPI.SparseMatrixMPI_CPU{Float64, Int64}) @ LinearAlgebraMPI ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/mumps_factorization.jl:484 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1414 [inlined] [6] macro expansion @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:78 [inlined] [7] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1339 [inlined] [8] macro expansion @ ~/.julia/packages/PrecompileTools/L8A3n/src/workloads.jl:140 [inlined] [9] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1308 [10] include @ ./Base.jl:562 [inlined] [11] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:2924 [12] top-level scope @ stdin:6 in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/src/LinearAlgebraMPI.jl:1 in expression starting at stdin: in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl:17 ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl" │ ok = false │ expect_success = true │ exitcode = 1 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_lazy_transpose.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Hh34vI/Project.toml" MPI Lazy Transpose: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success 
Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:47 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:47 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37 Fatal error in internal_Init_thread: Other MPI error, error stack: internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffdf91da940) failed MPII_Init_thread(257)..............: MPID_Init(65)......................: init_world(169)....................: channel initialization failed MPIDI_CH3_Init(84).................: MPID_nem_init(314).................: MPID_nem_tcp_init(175).............: MPID_nem_tcp_get_business_card(400): GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1) ┌ Info: MPI test exit status mismatch │ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_vector_multiplication.jl" │ ok = false │ expect_success = true │ exitcode = 15 │ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_vector_multiplication.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"]) └ active_proj = "/tmp/jl_Hh34vI/Project.toml" MPI Vector Multiplication: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 Expression: ok == expect_success Evaluated: false == true Stacktrace: [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined] [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:50 [inlined] [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined] [5] macro expansion @ 
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fff2d384380) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_dense_matrix.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_dense_matrix.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI Dense Matrix: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:53 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:53 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffca4ffd7d0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_sparse_api.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_sparse_api.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI Sparse API Extensions: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:56 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:56 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffcfa72f640) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_blocks.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_blocks.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI Block Matrix Operations: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:59 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:59 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffde39c74b0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_utilities.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_utilities.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI Utilities: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:62 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:62 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffda0022da0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_local_constructors.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_local_constructors.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI Local Constructors: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:65 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:65 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe5f691ce0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_indexing.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_indexing.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI Indexing: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:68 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:68 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffd35a23cf0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:85
Expression: occursin("Pass:", output)
Evaluated: occursin("Pass:", "")
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:85 [inlined]
 [3] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [4] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined]
 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [6] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:86
Expression: occursin("Fail: 0", output)
Evaluated: occursin("Fail: 0", "")
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:86 [inlined]
 [3] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [4] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined]
 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [6] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
MPI Factorization: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:87
Expression: occursin("Error: 0", output)
Evaluated: occursin("Error: 0", "")
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:87 [inlined]
 [3] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [4] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:73 [inlined]
 [5] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [6] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
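
The harness failures at runtests.jl:33 compare a child process's exit status against expect_success, while the MPI Factorization checks at runtests.jl:85-87 search the captured output for "Pass:", "Fail: 0", and "Error: 0"; because each worker aborts in MPI_Init_thread before printing anything, the captured output is empty. The sketch below illustrates that style of check. It is not the package's run_mpi_test (whose body is not shown in this log); mpiexec_path and testfile are illustrative placeholders.

    using Test

    function check_mpi_test(mpiexec_path::String, testfile::String; nprocs::Int = 2)
        # Launch the test file under mpiexec and capture combined stdout/stderr.
        cmd = `$mpiexec_path -n $nprocs $(Base.julia_cmd()) --startup-file=no $testfile`
        out = Pipe()
        proc = run(pipeline(cmd; stdout = out, stderr = out); wait = false)
        close(out.in)                      # close our write end so read() sees EOF when the child exits
        output = read(out, String)
        ok = success(proc)                 # exit-status check, analogous to runtests.jl:33
        @test ok
        @test occursin("Pass:", output)    # output checks, analogous to runtests.jl:85-87
        @test occursin("Fail: 0", output)
        @test occursin("Error: 0", output)
        return ok, output
    end
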
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffd1a868aa0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffc76d588e0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_new_operations.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_new_operations.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
Mixed Sparse-Dense Operations: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:90 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:90 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffe01b02e50) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_repartition.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_repartition.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI Repartition: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:93 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:93 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7ffea7e455a0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
Fatal error in internal_Init_thread: Other MPI error, error stack:
internal_Init_thread(71)...........: MPI_Init_thread(argc=(nil), argv=(nil), required=2, provided=0x7fffce19e1b0) failed
MPII_Init_thread(257)..............:
MPID_Init(65)......................:
init_world(169)....................: channel initialization failed
MPIDI_CH3_Init(84).................:
MPID_nem_init(314).................:
MPID_nem_tcp_init(175).............:
MPID_nem_tcp_get_business_card(400):
GetSockInterfaceAddr(373)..........: gethostbyname failed, LinearAlgebraMPI-primary-uucGkgNA (errno 1)
┌ Info: MPI test exit status mismatch
│ test_file = "/home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_map_rows.jl"
│ ok = false
│ expect_success = true
│ exitcode = 15
│ cmd = setenv(`/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --threads=2 --project=/tmp/jl_Hh34vI/Project.toml /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/test_map_rows.jl`,["LANG=C.UTF-8", "PYTHON=", "PATH=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/opt/julia/bin", "OPENBLAS_MAIN_FREE=1", "JULIA_CPU_THREADS=1", "JULIA_NUM_PRECOMPILE_TASKS=1", "DISPLAY=:1", "MPITRAMPOLINE_MPIEXEC=/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib/mpich/bin/mpiexec", "JULIA_LOAD_PATH=@:/tmp/jl_Hh34vI", "UCX_MEMTYPE_CACHE=no" … "OPENBLAS_NUM_THREADS=1", "UCX_ERROR_SIGNALS=SIGILL,SIGBUS,SIGFPE", "LD_LIBRARY_PATH=/opt/julia/bin/../lib/julia:/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/lib:/opt/julia/bin/../lib/julia:/opt/julia/bin/../lib", "HOME=/home/pkgeval", "CI=true", "JULIA_PKG_PRECOMPILE_AUTO=0", "JULIA_PKGEVAL=true", "JULIA_DEPOT_PATH=/home/pkgeval/.julia:/usr/local/share/julia:", "R_HOME=*", "JULIA_NUM_THREADS=1"])
└ active_proj = "/tmp/jl_Hh34vI/Project.toml"
MPI map_rows: Test Failed at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
Expression: ok == expect_success
Evaluated: false == true
Stacktrace:
 [1] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:680 [inlined]
 [2] run_mpi_test(test_file::String; nprocs::Int64, nthreads::Int64, expect_success::Bool) @ Main ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:33
 [3] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:96 [inlined]
 [4] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [5] macro expansion @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:96 [inlined]
 [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.11/Test/src/Test.jl:1709 [inlined]
 [7] top-level scope @ ~/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:37
Test Summary:                   | Fail  Total     Time
LinearAlgebraMPI Tests          |   17     17  5m07.3s
  MPI Matrix Multiplication     |    1      1    14.7s
  MPI Transpose                 |    1      1     9.2s
  MPI Addition                  |    1      1     9.1s
  MPI Lazy Transpose            |    1      1  2m54.9s
  MPI Vector Multiplication     |    1      1     9.0s
  MPI Dense Matrix              |    1      1     9.0s
  MPI Sparse API Extensions     |    1      1     9.0s
  MPI Block Matrix Operations   |    1      1     9.0s
  MPI Utilities                 |    1      1     9.0s
  MPI Local Constructors        |    1      1     9.0s
  MPI Indexing                  |    1      1     9.1s
  MPI Factorization             |    3      3     9.4s
  Mixed Sparse-Dense Operations |    1      1     9.0s
  MPI Repartition               |    1      1     9.0s
  MPI map_rows                  |    1      1     9.1s
ERROR: LoadError: Some tests did not pass: 0 passed, 17 failed, 0 errored, 0 broken.
in expression starting at /home/pkgeval/.julia/packages/LinearAlgebraMPI/VWjgN/test/runtests.jl:36
Testing failed after 324.12s
ERROR: LoadError: Package LinearAlgebraMPI errored during testing
Stacktrace:
 [1] pkgerror(msg::String) @ Pkg.Types /opt/julia/share/julia/stdlib/v1.11/Pkg/src/Types.jl:68
 [2] test(ctx::Pkg.Types.Context, pkgs::Vector{Pkg.Types.PackageSpec}; coverage::Bool, julia_args::Cmd, test_args::Cmd, test_fn::Nothing, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool) @ Pkg.Operations /opt/julia/share/julia/stdlib/v1.11/Pkg/src/Operations.jl:2128
 [3] test @ /opt/julia/share/julia/stdlib/v1.11/Pkg/src/Operations.jl:2011 [inlined]
 [4] test(ctx::Pkg.Types.Context, pkgs::Vector{Pkg.Types.PackageSpec}; coverage::Bool, test_fn::Nothing, julia_args::Cmd, test_args::Cmd, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool, kwargs::@Kwargs{io::IOContext{IO}}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.11/Pkg/src/API.jl:481
 [5] test(pkgs::Vector{Pkg.Types.PackageSpec}; io::IOContext{IO}, kwargs::@Kwargs{julia_args::Cmd}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.11/Pkg/src/API.jl:159
 [6] test @ /opt/julia/share/julia/stdlib/v1.11/Pkg/src/API.jl:147 [inlined]
 [7] #test#74 @ /opt/julia/share/julia/stdlib/v1.11/Pkg/src/API.jl:146 [inlined]
 [8] top-level scope @ /PkgEval.jl/scripts/evaluate.jl:223
in expression starting at /PkgEval.jl/scripts/evaluate.jl:214
PkgEval failed after 735.57s: package fails to precompile
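
The summary shows the same failure mode for all 17 test sets: each mpiexec-launched worker aborts in MPI_Init_thread before any test code runs because the container hostname does not resolve. A minimal reproduction of just that step, outside the package's test suite, might look like the sketch below; the mpiexec path is the MPICH launcher shown in the log, and it is assumed (not verified here) that MPI.jl in the active project is configured to use that same MPICH. A common workaround in sandboxes like this is to make the hostname resolvable, for example via an /etc/hosts entry pointing it at 127.0.0.1, though the log itself does not confirm that fix.

    # Hedged reproduction sketch: run a bare MPI.Init() on 2 ranks with the MPICH
    # mpiexec used by the test harness. If hostname resolution is broken, this is
    # expected to abort with the same "gethostbyname failed" error before printing.
    mpiexec_path = "/home/pkgeval/.julia/artifacts/0e0ce7ca2ccbc8e501570e8e0b6c7d4d9e8f1cdc/bin/mpiexec"
    script = """
    using MPI            # assumes MPI.jl is available in the active project
    MPI.Init()
    println("rank ", MPI.Comm_rank(MPI.COMM_WORLD), " of ", MPI.Comm_size(MPI.COMM_WORLD))
    MPI.Finalize()
    """
    run(`$mpiexec_path -n 2 $(Base.julia_cmd()) --startup-file=no --project=$(Base.active_project()) -e $script`)
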