Package evaluation to test GridapPETSc on Julia 1.14.0-DEV.1299 (6d6224db99*) started at 2025-11-27T20:03:47.613

################################################################################
# Set-up
#

Installing PkgEval dependencies (TestEnv)...
Set-up completed after 9.25s

################################################################################
# Installation
#

Installing GridapPETSc...
Resolving package versions...
Installed DiffResults ────────────── v1.1.0
Installed LightXML ───────────────── v0.9.3
Installed ForwardDiff ────────────── v1.3.0
Installed FillArrays ─────────────── v1.15.0
Installed MacroTools ─────────────── v0.5.16
Installed DataStructures ─────────── v0.19.3
Installed JLD2 ───────────────────── v0.5.15
Installed PkgVersion ─────────────── v0.3.3
Installed MPI ────────────────────── v0.20.23
Installed MKL_jll ────────────────── v2025.2.0+0
Installed PolynomialBases ────────── v0.4.25
Installed Adapt ──────────────────── v4.4.0
Installed SimpleUnPack ───────────── v1.1.0
Installed ConstructionBase ───────── v1.6.0
Installed oneTBB_jll ─────────────── v2022.0.0+1
Installed MPICH_jll ──────────────── v4.3.2+0
Installed AbstractTrees ──────────── v0.4.5
Installed PETSc_jll ──────────────── v3.22.0+0
Installed MicrosoftMPI_jll ───────── v10.1.4+3
Installed CodecZlib ──────────────── v0.7.8
Installed HashArrayMappedTries ───── v0.2.0
Installed OrderedCollections ─────── v1.8.1
Installed NearestNeighbors ───────── v0.4.24
Installed ArrayInterface ─────────── v7.22.0
Installed TranscodingStreams ─────── v0.11.3
Installed RecipesBase ────────────── v1.3.4
Installed Combinatorics ──────────── v1.0.3
Installed IntelOpenMP_jll ────────── v2025.2.0+0
Installed Statistics ─────────────── v1.11.1
Installed StatsAPI ───────────────── v1.7.1
Installed PartitionedArrays ──────── v0.3.5
Installed PrecompileTools ────────── v1.3.3
Installed OffsetArrays ───────────── v1.17.0
Installed ArgCheck ───────────────── v2.5.0
Installed CommonSubexpressions ───── v0.3.1
Installed FileIO ─────────────────── v1.17.1
Installed FastGaussQuadrature ────── v1.1.0
Installed StaticArrays ───────────── v1.9.15
Installed StaticArraysCore ───────── v1.4.4
Installed IrrationalConstants ────── v0.2.6
Installed BSON ───────────────────── v0.3.9
Installed Distances ──────────────── v0.10.12
Installed SparseMatricesCSR ──────── v0.6.9
Installed AbstractFFTs ───────────── v1.5.0
Installed SCALAPACK32_jll ────────── v2.2.2+0
Installed QuadGK ─────────────────── v2.11.2
Installed NaNMath ────────────────── v1.1.3
Installed OpenSpecFun_jll ────────── v0.5.6+0
Installed Requires ───────────────── v1.3.1
Installed NLsolve ────────────────── v4.5.1
Installed ArrayLayouts ───────────── v1.12.2
Installed Gridap ─────────────────── v0.19.6
Installed LogExpFunctions ────────── v0.3.29
Installed OpenMPI_jll ────────────── v5.0.9+0
Installed OpenBLAS32_jll ─────────── v0.3.29+0
Installed DiffRules ──────────────── v1.15.1
Installed Hwloc_jll ──────────────── v2.12.1+0
Installed AutoHashEquals ─────────── v2.2.0
Installed Parsers ────────────────── v2.8.3
Installed JSON ───────────────────── v0.21.4
Installed XML2_jll ───────────────── v2.15.1+0
Installed ScopedValues ───────────── v1.5.0
Installed Reexport ───────────────── v1.2.2
Installed NLSolversBase ──────────── v7.10.0
Installed FiniteDiff ─────────────── v2.29.0
Installed Setfield ───────────────── v1.1.2
Installed BlockArrays ────────────── v1.9.3
Installed SpecialFunctions ───────── v2.6.1
Installed MPIPreferences ─────────── v0.1.11
Installed FFTW_jll ───────────────── v3.3.11+0
Installed FFTW ───────────────────── v1.10.0
Installed Preferences ────────────── v1.5.0
Installed IterativeSolvers ───────── v0.9.4
Installed JLLWrappers ────────────── v1.7.1
Installed WriteVTK ───────────────── v1.21.2
Installed Libiconv_jll ───────────── v1.18.0+0
Installed VTKBase ────────────────── v1.0.1
Installed MPItrampoline_jll ──────── v5.5.4+0
Installed GridapDistributed ──────── v0.4.10
Installed ADTypes ────────────────── v1.19.0
Installed LineSearches ───────────── v7.5.1
Installed GridapPETSc ────────────── v0.5.6
Installed DocStringExtensions ────── v0.9.5
Installed CircularArrays ─────────── v1.4.0
Installed DifferentiationInterface ─ v0.7.12
Installing 9 artifacts
Installed artifact OpenSpecFun 194.9 KiB
Installed artifact FFTW 2.2 MiB
Installed artifact SCALAPACK32 2.3 MiB
Installed artifact Libiconv 1.9 MiB
Installed artifact Hwloc 3.5 MiB
Installed artifact XML2 2.2 MiB
Installed artifact MPICH 3.3 MiB
Installed artifact OpenBLAS32 10.0 MiB
Installed artifact PETSc 236.3 MiB
Updating `~/.julia/environments/v1.14/Project.toml`
  [bcdc36c2] + GridapPETSc v0.5.6
Updating `~/.julia/environments/v1.14/Manifest.toml`
  [47edcb42] + ADTypes v1.19.0
  [621f4979] + AbstractFFTs v1.5.0
  [1520ce14] + AbstractTrees v0.4.5
  [79e6a3ab] + Adapt v4.4.0
  [dce04be8] + ArgCheck v2.5.0
  [4fba245c] + ArrayInterface v7.22.0
  [4c555306] + ArrayLayouts v1.12.2
  [15f4f7f2] + AutoHashEquals v2.2.0
  [fbb218c0] + BSON v0.3.9
  [8e7c35d0] + BlockArrays v1.9.3
  [7a955b69] + CircularArrays v1.4.0
  [944b1d66] + CodecZlib v0.7.8
  [861a8166] + Combinatorics v1.0.3
  [bbf7d656] + CommonSubexpressions v0.3.1
  [187b0558] + ConstructionBase v1.6.0
  [864edb3b] + DataStructures v0.19.3
  [163ba53b] + DiffResults v1.1.0
  [b552c78f] + DiffRules v1.15.1
  [a0c0ee7d] + DifferentiationInterface v0.7.12
  [b4f34e82] + Distances v0.10.12
  [ffbed154] + DocStringExtensions v0.9.5
  [7a1cc6ca] + FFTW v1.10.0
  [442a2c76] + FastGaussQuadrature v1.1.0
  [5789e2e9] + FileIO v1.17.1
  [1a297f60] + FillArrays v1.15.0
  [6a86dc24] + FiniteDiff v2.29.0
  [f6369f11] + ForwardDiff v1.3.0
  [56d4f2e9] + Gridap v0.19.6
  [f9701e48] + GridapDistributed v0.4.10
  [bcdc36c2] + GridapPETSc v0.5.6
  [076d061b] + HashArrayMappedTries v0.2.0
  [92d709cd] + IrrationalConstants v0.2.6
  [42fd0dbc] + IterativeSolvers v0.9.4
⌅ [033835bb] + JLD2 v0.5.15
  [692b3bcd] + JLLWrappers v1.7.1
⌅ [682c06a0] + JSON v0.21.4
  [9c8b4983] + LightXML v0.9.3
  [d3d80556] + LineSearches v7.5.1
  [2ab3a3ac] + LogExpFunctions v0.3.29
  [da04e1cc] + MPI v0.20.23
  [3da0fdf6] + MPIPreferences v0.1.11
  [1914dd2f] + MacroTools v0.5.16
  [d41bc354] + NLSolversBase v7.10.0
  [2774e3e8] + NLsolve v4.5.1
  [77ba4419] + NaNMath v1.1.3
  [b8a86587] + NearestNeighbors v0.4.24
  [6fe1bfb0] + OffsetArrays v1.17.0
  [bac558e1] + OrderedCollections v1.8.1
  [69de0a69] + Parsers v2.8.3
⌅ [5a9dfac6] + PartitionedArrays v0.3.5
  [eebad327] + PkgVersion v0.3.3
  [c74db56a] + PolynomialBases v0.4.25
  [aea7be01] + PrecompileTools v1.3.3
  [21216c6a] + Preferences v1.5.0
  [1fd47b50] + QuadGK v2.11.2
  [3cdcf5f2] + RecipesBase v1.3.4
  [189a3867] + Reexport v1.2.2
  [ae029012] + Requires v1.3.1
  [7e506255] + ScopedValues v1.5.0
  [efcf1570] + Setfield v1.1.2
  [ce78b400] + SimpleUnPack v1.1.0
  [a0a7dd2c] + SparseMatricesCSR v0.6.9
  [276daf66] + SpecialFunctions v2.6.1
  [90137ffa] + StaticArrays v1.9.15
  [1e83bf80] + StaticArraysCore v1.4.4
  [10745b16] + Statistics v1.11.1
  [82ae8749] + StatsAPI v1.7.1
  [3bb67fe8] + TranscodingStreams v0.11.3
  [4004b06d] + VTKBase v1.0.1
  [64499a7a] + WriteVTK v1.21.2
  [f5851436] + FFTW_jll v3.3.11+0
⌃ [e33a78d0] + Hwloc_jll v2.12.1+0
  [1d5cc7b8] + IntelOpenMP_jll v2025.2.0+0
  [94ce4f54] + Libiconv_jll v1.18.0+0
  [856f044c] + MKL_jll v2025.2.0+0
  [7cb0a576] + MPICH_jll v4.3.2+0
  [f1f71cc9] + MPItrampoline_jll v5.5.4+0
  [9237b28f] + MicrosoftMPI_jll v10.1.4+3
  [656ef2d0] + OpenBLAS32_jll v0.3.29+0
  [fe0851c0] + OpenMPI_jll v5.0.9+0
  [efe28fd5] + OpenSpecFun_jll v0.5.6+0
  [8fa3689e] + PETSc_jll v3.22.0+0
  [aabda75e] + SCALAPACK32_jll v2.2.2+0
  [02c8fc9c] + XML2_jll v2.15.1+0
  [1317d2d5] + oneTBB_jll v2022.0.0+1
  [0dad84c5] + ArgTools v1.1.2
  [56f22d72] + Artifacts v1.11.0
  [2a0f44e3] + Base64 v1.11.0
  [ade2ca70] + Dates v1.11.0
  [8ba89e20] + Distributed v1.11.0
  [f43a241f] + Downloads v1.7.0
  [7b1f6079] + FileWatching v1.11.0
  [9fa8497b] + Future v1.11.0
  [b77e0a4c] + InteractiveUtils v1.11.0
  [ac6e5ff7] + JuliaSyntaxHighlighting v1.12.0
  [4af54fe1] + LazyArtifacts v1.11.0
  [b27032c2] + LibCURL v1.0.0
  [76f85450] + LibGit2 v1.11.0
  [8f399da3] + Libdl v1.11.0
  [37e2e46d] + LinearAlgebra v1.13.0
  [56ddb016] + Logging v1.11.0
  [d6f4376e] + Markdown v1.11.0
  [a63ad114] + Mmap v1.11.0
  [ca575930] + NetworkOptions v1.3.0
  [44cfe95a] + Pkg v1.13.0
  [de0858da] + Printf v1.11.0
  [9a3f8284] + Random v1.11.0
  [ea8e919c] + SHA v1.0.0
  [9e88b42a] + Serialization v1.11.0
  [6462fe0b] + Sockets v1.11.0
  [2f01184e] + SparseArrays v1.13.0
  [f489334b] + StyledStrings v1.13.0
  [4607b0f0] + SuiteSparse
  [fa267f1f] + TOML v1.0.3
  [a4e569a6] + Tar v1.10.0
  [8dfed614] + Test v1.11.0
  [cf7118a7] + UUIDs v1.11.0
  [4ec0a83e] + Unicode v1.11.0
  [e66e0078] + CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] + LibCURL_jll v8.17.0+0
  [e37daf67] + LibGit2_jll v1.9.1+0
  [29816b5a] + LibSSH2_jll v1.11.3+1
  [14a3606d] + MozillaCACerts_jll v2025.11.4
  [4536629a] + OpenBLAS_jll v0.3.29+0
  [05823500] + OpenLibm_jll v0.8.7+0
  [458c3c95] + OpenSSL_jll v3.5.4+0
  [efcefdf7] + PCRE2_jll v10.47.0+0
  [bea87d4a] + SuiteSparse_jll v7.10.1+0
  [83775a58] + Zlib_jll v1.3.1+2
  [3161d3a3] + Zstd_jll v1.5.7+1
  [8e850b90] + libblastrampoline_jll v5.15.0+0
  [8e850ede] + nghttp2_jll v1.68.0+1
  [3f19e933] + p7zip_jll v17.7.0+0
Info Packages marked with ⌃ and ⌅ have new versions available. Those with ⌃ may be upgradable, but those with ⌅ are restricted by compatibility constraints from upgrading. To see why use `status --outdated -m`
Building GridapPETSc → `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/aeb0905e921f122e11669e3f10faa318687aa3e0/build.log`
Installation completed after 77.1s

################################################################################
# Precompilation
#

Precompiling PkgEval dependencies...
Precompiling packages...
WARNING: Imported binding Operations.project_rel_path was undeclared at import time during import to TestEnv.
4552.8 ms ✓ TestEnv
1 dependency successfully precompiled in 5 seconds. 27 already precompiled.
1 dependency had output during precompilation:
┌ TestEnv
│ WARNING: Imported binding Operations.project_rel_path was undeclared at import time during import to TestEnv.
└
┌ Error: Failed to use TestEnv.jl; test dependencies will not be precompiled
│ exception =
│ UndefVarError: `project_rel_path` not defined in `TestEnv`
│ Suggestion: this global was defined as `Pkg.Operations.project_rel_path` but not assigned a value.
│ Stacktrace:
│ [1] get_test_dir(ctx::Pkg.Types.Context, pkgspec::PackageSpec)
│ @ TestEnv ~/.julia/packages/TestEnv/i9lgt/src/julia-1.11/common.jl:75
│ [2] test_dir_has_project_file
│ @ ~/.julia/packages/TestEnv/i9lgt/src/julia-1.11/common.jl:52 [inlined]
│ [3] maybe_gen_project_override!
│ @ ~/.julia/packages/TestEnv/i9lgt/src/julia-1.11/common.jl:83 [inlined]
│ [4] activate(pkg::String; allow_reresolve::Bool)
│ @ TestEnv ~/.julia/packages/TestEnv/i9lgt/src/julia-1.11/activate_set.jl:12
│ [5] activate(pkg::String)
│ @ TestEnv ~/.julia/packages/TestEnv/i9lgt/src/julia-1.11/activate_set.jl:9
│ [6] top-level scope
│ @ /PkgEval.jl/scripts/precompile.jl:24
│ [7] include(mod::Module, _path::String)
│ @ Base ./Base.jl:309
│ [8] exec_options(opts::Base.JLOptions)
│ @ Base ./client.jl:344
│ [9] _start()
│ @ Base ./client.jl:577
└ @ Main /PkgEval.jl/scripts/precompile.jl:26
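Note: the failure above is TestEnv reaching into Pkg internals. On this Julia build, `Pkg.Operations.project_rel_path` is declared but never assigned, so TestEnv's unqualified import throws on first use. A minimal sketch of a defensive workaround (a hypothetical helper, not TestEnv's actual code; the Pkg call is an internal API whose signature is inferred from the stacktrace):

    using Pkg

    # `isdefined` is false for a binding that is declared but has no value,
    # which is exactly the state reported in the error above.
    function project_rel_path_or_fallback(ctx::Pkg.Types.Context, path::String)
        if isdefined(Pkg.Operations, :project_rel_path)
            return Pkg.Operations.project_rel_path(ctx, path)  # internal Pkg API, may change
        end
        # assumed fallback: resolve the path relative to the active project file
        return normpath(joinpath(dirname(ctx.env.project_file), path))
    end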
Precompiling package dependencies...
Precompiling packages...
4288.8 ms ✓ MacroTools
801.9 ms ✓ Reexport
1079.2 ms ✓ Statistics
1085.2 ms ✓ ConstructionBase
2057.8 ms ✓ IrrationalConstants
953.1 ms ✓ StaticArraysCore
824.8 ms ✓ StatsAPI
1177.2 ms ✓ Requires
919.7 ms ✓ VTKBase
1205.1 ms ✓ OrderedCollections
886.1 ms ✓ HashArrayMappedTries
1356.0 ms ✓ ADTypes
1382.6 ms ✓ DocStringExtensions
1414.2 ms ✓ AbstractTrees
857.1 ms ✓ SimpleUnPack
2122.2 ms ✓ BSON
2006.7 ms ✓ Combinatorics
1030.2 ms ✓ AbstractFFTs
1587.0 ms ✓ OffsetArrays
847.5 ms ✓ ArgCheck
2100.0 ms ✓ FillArrays
943.8 ms ✓ AutoHashEquals
1255.6 ms ✓ TranscodingStreams
1221.1 ms ✓ SuiteSparse
1014.8 ms ✓ NaNMath
1119.9 ms ✓ Preferences
4475.3 ms ✓ PkgVersion
1714.3 ms ✓ CommonSubexpressions
1297.4 ms ✓ Statistics → SparseArraysExt
757.0 ms ✓ ConstructionBase → ConstructionBaseLinearAlgebraExt
858.0 ms ✓ DiffResults
1491.3 ms ✓ Distances
923.3 ms ✓ Adapt
8910.9 ms ✓ FileIO
3460.8 ms ✓ DataStructures
891.4 ms ✓ ScopedValues
1842.7 ms ✓ DifferentiationInterface
762.5 ms ✓ ADTypes → ADTypesConstructionBaseExt
1359.8 ms ✓ LogExpFunctions
4016.5 ms ✓ AbstractFFTs → AbstractFFTsTestExt
822.5 ms ✓ CircularArrays
2357.3 ms ✓ FillArrays → FillArraysSparseArraysExt
953.0 ms ✓ FillArrays → FillArraysStatisticsExt
1259.2 ms ✓ CodecZlib
1861.1 ms ✓ SparseMatricesCSR
1178.1 ms ✓ JLLWrappers
1978.4 ms ✓ MPIPreferences
1272.9 ms ✓ PrecompileTools
2952.9 ms ✓ Setfield
1254.5 ms ✓ Distances → DistancesSparseArraysExt
1137.4 ms ✓ ArrayInterface
1239.3 ms ✓ Adapt → AdaptSparseArraysExt
813.3 ms ✓ OffsetArrays → OffsetArraysAdaptExt
2268.1 ms ✓ QuadGK
1282.5 ms ✓ DifferentiationInterface → DifferentiationInterfaceSparseArraysExt
4846.5 ms ✓ IntelOpenMP_jll
1404.7 ms ✓ OpenBLAS32_jll
1390.0 ms ✓ Libiconv_jll
4976.1 ms ✓ oneTBB_jll
1413.3 ms ✓ FFTW_jll
1408.7 ms ✓ Hwloc_jll
1128.3 ms ✓ MicrosoftMPI_jll
1439.4 ms ✓ OpenSpecFun_jll
4899.6 ms ✓ MPItrampoline_jll
3012.3 ms ✓ RecipesBase
13528.5 ms ✓ StaticArrays
16502.9 ms ✓ Parsers
47523.1 ms ✓ JLD2
719.0 ms ✓ ArrayInterface → ArrayInterfaceStaticArraysCoreExt
1262.4 ms ✓ ArrayInterface → ArrayInterfaceSparseArraysExt
1462.1 ms ✓ XML2_jll
Downloading artifact: IntelOpenMP
Downloading artifact: oneTBB
10742.5 ms ✓ MKL_jll
4772.2 ms ✓ MPICH_jll
4334.8 ms ✓ OpenMPI_jll
4807.0 ms ✓ SpecialFunctions
3821.2 ms ✓ IterativeSolvers
1386.8 ms ✓ StaticArrays → StaticArraysStatisticsExt
1405.7 ms ✓ ConstructionBase → ConstructionBaseStaticArraysExt
23404.0 ms ✓ ArrayLayouts
1537.1 ms ✓ Adapt → AdaptStaticArraysExt
1434.3 ms ✓ DifferentiationInterface → DifferentiationInterfaceStaticArraysExt
3865.8 ms ✓ JSON
1336.0 ms ✓ FiniteDiff
1058.4 ms ✓ LightXML
6435.1 ms ✓ FFTW
11289.4 ms ✓ MPI
5119.2 ms ✓ SCALAPACK32_jll
5070.6 ms ✓ FastGaussQuadrature
1241.7 ms ✓ DiffRules
4797.4 ms ✓ NearestNeighbors
3419.3 ms ✓ ArrayLayouts → ArrayLayoutsSparseArraysExt
4936.9 ms ✓ BlockArrays
1253.5 ms ✓ FiniteDiff → FiniteDiffSparseArraysExt
1367.8 ms ✓ FiniteDiff → FiniteDiffStaticArraysExt
953.8 ms ✓ DifferentiationInterface → DifferentiationInterfaceFiniteDiffExt
1743.2 ms ✓ WriteVTK
8534.0 ms ✓ PartitionedArrays
5612.9 ms ✓ PETSc_jll
4546.8 ms ✓ PolynomialBases
6560.3 ms ✓ ForwardDiff
2943.5 ms ✓ BlockArrays → BlockArraysAdaptExt
2068.4 ms ✓ ForwardDiff → ForwardDiffStaticArraysExt
1506.4 ms ✓ DifferentiationInterface → DifferentiationInterfaceForwardDiffExt
2164.7 ms ✓ NLSolversBase
2497.5 ms ✓ LineSearches
2474.0 ms ✓ NLsolve
47669.4 ms ✓ Gridap
WARNING: Constructor for type "FESpace" was extended in `GridapDistributed` without explicit qualification or import.
NOTE: Assumed "FESpace" refers to `Gridap.FESpaces.FESpace`. This behavior is deprecated and may differ in future versions.
NOTE: This behavior may have differed in Julia versions prior to 1.12.
Hint: If you intended to create a new generic function of the same name, use `function FESpace end`.
Hint: To silence the warning, qualify `FESpace` as `Gridap.FESpaces.FESpace` in the method signature or explicitly `import Gridap.FESpaces: FESpace`.
17487.4 ms ✓ GridapDistributed
17479.7 ms ✓ GridapPETSc
109 dependencies successfully precompiled in 431 seconds. 37 already precompiled.
2 dependencies had output during precompilation:
┌ MKL_jll
│ Downloading artifact: IntelOpenMP
│ Downloading artifact: oneTBB
└
┌ GridapDistributed
│ WARNING: Constructor for type "FESpace" was extended in `GridapDistributed` without explicit qualification or import.
│ NOTE: Assumed "FESpace" refers to `Gridap.FESpaces.FESpace`. This behavior is deprecated and may differ in future versions.
│ NOTE: This behavior may have differed in Julia versions prior to 1.12.
│ Hint: If you intended to create a new generic function of the same name, use `function FESpace end`.
│ Hint: To silence the warning, qualify `FESpace` as `Gridap.FESpaces.FESpace` in the method signature or explicitly `import Gridap.FESpaces: FESpace`.
└
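Note: the `FESpace` warning above is the Julia 1.12+ deprecation of extending a constructor through a name that was only brought into scope by `using`. A self-contained sketch with stand-in modules (not the actual Gridap/GridapDistributed code):

    module FakeGridap
    export FESpace
    struct FESpace end          # stand-in for Gridap's FESpace
    end

    module FakeGridapDistributed
    using ..FakeGridap          # brings the unqualified name `FESpace` into scope

    # Deprecated since Julia 1.12 and the trigger of the warning above:
    #   FESpace(x::Int) = FESpace()

    # Fix 1: qualify the extension in the method signature
    FakeGridap.FESpace(x::Int) = FESpace()

    # Fix 2 (alternative): import the binding explicitly before adding methods
    #   import ..FakeGridap: FESpace
    end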
Precompilation completed after 448.11s

################################################################################
# Testing
#

Testing GridapPETSc
Status `/tmp/jl_vroPIT/Project.toml`
  [56d4f2e9] Gridap v0.19.6
  [f9701e48] GridapDistributed v0.4.10
  [bcdc36c2] GridapPETSc v0.5.6
  [da04e1cc] MPI v0.20.23
⌅ [5a9dfac6] PartitionedArrays v0.3.5
  [a0a7dd2c] SparseMatricesCSR v0.6.9
  [8fa3689e] PETSc_jll v3.22.0+0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.13.0
  [9a3f8284] Random v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [8dfed614] Test v1.11.0
Status `/tmp/jl_vroPIT/Manifest.toml`
  [47edcb42] ADTypes v1.19.0
  [621f4979] AbstractFFTs v1.5.0
  [1520ce14] AbstractTrees v0.4.5
  [79e6a3ab] Adapt v4.4.0
  [dce04be8] ArgCheck v2.5.0
  [4fba245c] ArrayInterface v7.22.0
  [4c555306] ArrayLayouts v1.12.2
  [15f4f7f2] AutoHashEquals v2.2.0
  [fbb218c0] BSON v0.3.9
  [8e7c35d0] BlockArrays v1.9.3
  [7a955b69] CircularArrays v1.4.0
  [944b1d66] CodecZlib v0.7.8
  [861a8166] Combinatorics v1.0.3
  [bbf7d656] CommonSubexpressions v0.3.1
  [187b0558] ConstructionBase v1.6.0
  [864edb3b] DataStructures v0.19.3
  [163ba53b] DiffResults v1.1.0
  [b552c78f] DiffRules v1.15.1
  [a0c0ee7d] DifferentiationInterface v0.7.12
  [b4f34e82] Distances v0.10.12
  [ffbed154] DocStringExtensions v0.9.5
  [7a1cc6ca] FFTW v1.10.0
  [442a2c76] FastGaussQuadrature v1.1.0
  [5789e2e9] FileIO v1.17.1
  [1a297f60] FillArrays v1.15.0
  [6a86dc24] FiniteDiff v2.29.0
  [f6369f11] ForwardDiff v1.3.0
  [56d4f2e9] Gridap v0.19.6
  [f9701e48] GridapDistributed v0.4.10
  [bcdc36c2] GridapPETSc v0.5.6
  [076d061b] HashArrayMappedTries v0.2.0
  [92d709cd] IrrationalConstants v0.2.6
  [42fd0dbc] IterativeSolvers v0.9.4
⌅ [033835bb] JLD2 v0.5.15
  [692b3bcd] JLLWrappers v1.7.1
⌅ [682c06a0] JSON v0.21.4
  [9c8b4983] LightXML v0.9.3
  [d3d80556] LineSearches v7.5.1
  [2ab3a3ac] LogExpFunctions v0.3.29
  [da04e1cc] MPI v0.20.23
  [3da0fdf6] MPIPreferences v0.1.11
  [1914dd2f] MacroTools v0.5.16
  [d41bc354] NLSolversBase v7.10.0
  [2774e3e8] NLsolve v4.5.1
  [77ba4419] NaNMath v1.1.3
  [b8a86587] NearestNeighbors v0.4.24
  [6fe1bfb0] OffsetArrays v1.17.0
  [bac558e1] OrderedCollections v1.8.1
  [69de0a69] Parsers v2.8.3
⌅ [5a9dfac6] PartitionedArrays v0.3.5
  [eebad327] PkgVersion v0.3.3
  [c74db56a] PolynomialBases v0.4.25
  [aea7be01] PrecompileTools v1.3.3
  [21216c6a] Preferences v1.5.0
  [1fd47b50] QuadGK v2.11.2
  [3cdcf5f2] RecipesBase v1.3.4
  [189a3867] Reexport v1.2.2
  [ae029012] Requires v1.3.1
  [7e506255] ScopedValues v1.5.0
  [efcf1570] Setfield v1.1.2
  [ce78b400] SimpleUnPack v1.1.0
  [a0a7dd2c] SparseMatricesCSR v0.6.9
  [276daf66] SpecialFunctions v2.6.1
  [90137ffa] StaticArrays v1.9.15
  [1e83bf80] StaticArraysCore v1.4.4
  [10745b16] Statistics v1.11.1
  [82ae8749] StatsAPI v1.7.1
  [3bb67fe8] TranscodingStreams v0.11.3
  [4004b06d] VTKBase v1.0.1
  [64499a7a] WriteVTK v1.21.2
  [f5851436] FFTW_jll v3.3.11+0
⌃ [e33a78d0] Hwloc_jll v2.12.1+0
  [1d5cc7b8] IntelOpenMP_jll v2025.2.0+0
  [94ce4f54] Libiconv_jll v1.18.0+0
  [856f044c] MKL_jll v2025.2.0+0
  [7cb0a576] MPICH_jll v4.3.2+0
  [f1f71cc9] MPItrampoline_jll v5.5.4+0
  [9237b28f] MicrosoftMPI_jll v10.1.4+3
  [656ef2d0] OpenBLAS32_jll v0.3.29+0
  [fe0851c0] OpenMPI_jll v5.0.9+0
  [efe28fd5] OpenSpecFun_jll v0.5.6+0
  [8fa3689e] PETSc_jll v3.22.0+0
  [aabda75e] SCALAPACK32_jll v2.2.2+0
  [02c8fc9c] XML2_jll v2.15.1+0
  [1317d2d5] oneTBB_jll v2022.0.0+1
  [0dad84c5] ArgTools v1.1.2
  [56f22d72] Artifacts v1.11.0
  [2a0f44e3] Base64 v1.11.0
  [ade2ca70] Dates v1.11.0
  [8ba89e20] Distributed v1.11.0
  [f43a241f] Downloads v1.7.0
  [7b1f6079] FileWatching v1.11.0
  [9fa8497b] Future v1.11.0
  [b77e0a4c] InteractiveUtils v1.11.0
  [ac6e5ff7] JuliaSyntaxHighlighting v1.12.0
  [4af54fe1] LazyArtifacts v1.11.0
  [b27032c2] LibCURL v1.0.0
  [76f85450] LibGit2 v1.11.0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.13.0
  [56ddb016] Logging v1.11.0
  [d6f4376e] Markdown v1.11.0
  [a63ad114] Mmap v1.11.0
  [ca575930] NetworkOptions v1.3.0
  [44cfe95a] Pkg v1.13.0
  [de0858da] Printf v1.11.0
  [9a3f8284] Random v1.11.0
  [ea8e919c] SHA v1.0.0
  [9e88b42a] Serialization v1.11.0
  [6462fe0b] Sockets v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [f489334b] StyledStrings v1.13.0
  [4607b0f0] SuiteSparse
  [fa267f1f] TOML v1.0.3
  [a4e569a6] Tar v1.10.0
  [8dfed614] Test v1.11.0
  [cf7118a7] UUIDs v1.11.0
  [4ec0a83e] Unicode v1.11.0
  [e66e0078] CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] LibCURL_jll v8.17.0+0
  [e37daf67] LibGit2_jll v1.9.1+0
  [29816b5a] LibSSH2_jll v1.11.3+1
  [14a3606d] MozillaCACerts_jll v2025.11.4
  [4536629a] OpenBLAS_jll v0.3.29+0
  [05823500] OpenLibm_jll v0.8.7+0
  [458c3c95] OpenSSL_jll v3.5.4+0
  [efcefdf7] PCRE2_jll v10.47.0+0
  [bea87d4a] SuiteSparse_jll v7.10.1+0
  [83775a58] Zlib_jll v1.3.1+2
  [3161d3a3] Zstd_jll v1.5.7+1
  [8e850b90] libblastrampoline_jll v5.15.0+0
  [8e850ede] nghttp2_jll v1.68.0+1
  [3f19e933] p7zip_jll v17.7.0+0
Info Packages marked with ⌃ and ⌅ have new versions available. Those with ⌃ may be upgradable, but those with ⌅ are restricted by compatibility constraints from upgrading.
Testing Running tests...
[0] PetscDetermineInitialFPTrap(): Floating point trapping is off by default 0
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType host available, initializing
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDevice host initialized, default device id 0, view FALSE, init type lazy
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType cuda not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType hip not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType sycl not available
[0] PetscInitialize_Common(): PETSc successfully started: number of processors = 1
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-kuDQvZEo.(none)
[0] PetscInitialize_Common(): Running on machine: GridapPETSc-against-kuDQvZEo
[0] PetscInitialize_Common(): BLAS: Environment number of OpenBLAS threads 1 given by OPENBLAS_NUM_THREADS
[0] PetscBLASSetNumThreads(): Setting number of threads used for OpenBLAS provided BLAS 1
[0] PetscCommDuplicate(): Duplicating a communicator 1140850689 -2080374784 max tags = 1073741823
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-kuDQvZEo.(none)
Vec Object: 1 MPI process
  type: seq
1.
2.
4.
1.
[0] PetscCommDuplicate(): Duplicating a communicator 1140850688 -2080374783 max tags = 1073741823
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-kuDQvZEo.(none)
Vec Object: 1 MPI process
  type: seq
1.
2.
4.
1.
Vec Object: 1 MPI process
  type: seq
20.
40.
4.
60.
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 10 unneeded,10 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 3
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 0)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 3 nodes of 4. Limit used: 5. Using Inode routines
Mat Object: 1 MPI process
  type: seqaij
row 0: (1, 2.) (3, 3.) (4, 1.)
row 1: (1, 6.) (3, 11.) (4, 5.)
row 2: (1, 4.) (3, 3.)
row 3: (3, 4.) (4, 3.)
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 4; storage space: 0 unneeded,10 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 3
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 0)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 4 nodes out of 4 rows. Not using Inode routines
Mat Object: 1 MPI process
  type: seqaij
row 0: (0, 4.) (1, -2.)
row 1: (0, -1.) (1, 6.) (2, -2.)
row 2: (1, -1.) (2, 6.) (3, -2.)
row 3: (2, -1.) (3, 4.)
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] KSPConvergedDefault(): Linear solver has converged. Residual norm 1.249022930744e-16 is less than relative tolerance 1.000000000000e-05 times initial right-hand side norm 1.145643923739e+00 at iteration 4
[0] KSPConvergedDefault(): Linear solver has converged. Residual norm 1.249022930744e-16 is less than relative tolerance 1.000000000000e-05 times initial right-hand side norm 1.145643923739e+00 at iteration 4
[0] KSPConvergedDefault(): Linear solver has converged. Residual norm 1.291406315399e-16 is less than relative tolerance 1.000000000000e-05 times initial right-hand side norm 1.145643923739e+00 at iteration 4
KSP Object: (p_) 1 MPI process
  type: gmres
    restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement
    happy breakdown tolerance 1e-30
  maximum iterations=10000, initial guess is zero
  tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
  left preconditioning
  using PRECONDITIONED norm type for convergence test
PC Object: (p_) 1 MPI process
  type: jacobi
    type DIAGONAL
  linear system matrix = precond matrix:
  Mat Object: 1 MPI process
    type: seqaij
    rows=4, cols=4
    total: nonzeros=10, allocated nonzeros=0
    total number of mallocs used during MatSetValues calls=0
      not using I-node routines
[0] PetscFinalize(): PetscFinalize() called
[0] Petsc_OuterComm_Attr_DeleteFn(): Removing reference to PETSc communicator embedded in a user MPI_Comm -2080374783
[0] Petsc_InnerComm_Attr_DeleteFn(): User MPI_Comm 1140850688 is being unlinked from inner PETSc comm -2080374783
[0] PetscCommDestroy(): Deleting PETSc MPI_Comm -2080374783
[0] Petsc_Counter_Attr_DeleteFn(): Deleting counter data in an MPI_Comm -2080374783
[0] Petsc_OuterComm_Attr_DeleteFn(): Removing reference to PETSc communicator embedded in a user MPI_Comm -2080374784
[0] Petsc_InnerComm_Attr_DeleteFn(): User MPI_Comm 1140850689 is being unlinked from inner PETSc comm -2080374784
[0] PetscCommDestroy(): Deleting PETSc MPI_Comm -2080374784
[0] Petsc_Counter_Attr_DeleteFn(): Deleting counter data in an MPI_Comm -2080374784
19.972710 seconds (5.70 M allocations: 315.479 MiB, 5.42% gc time, 37.07% compilation time: 12% of which was recompilation)
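Note: the GMRES/Jacobi solve above matches the usage pattern GridapPETSc documents: initialize PETSc with a command-line-style options string and let `PETScLinearSolver` read the KSP/PC configuration from the options database (the `(p_)` prefix in the view indicates the test passes prefixed options). A hedged sketch against the 4x4 seqaij matrix printed above, using Gridap.Algebra's generic LinearSolver interface; the actual test code may differ:

    using Gridap
    using GridapPETSc
    using SparseArrays

    # -info reproduces the verbose [0] ... diagnostics seen throughout this log
    options = "-info -ksp_type gmres -pc_type jacobi -ksp_rtol 1.0e-5 -ksp_converged_reason"

    GridapPETSc.with(args=split(options)) do
      A = sparse([ 4.0 -2.0  0.0  0.0;
                  -1.0  6.0 -2.0  0.0;
                   0.0 -1.0  6.0 -2.0;
                   0.0  0.0 -1.0  4.0])
      b = ones(4)
      solver = PETScLinearSolver()
      ss = symbolic_setup(solver, A)   # Gridap.Algebra LinearSolver interface
      ns = numerical_setup(ss, A)
      x = zeros(4)
      solve!(x, ns, b)
    end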
[0] PetscDetermineInitialFPTrap(): Floating point trapping is off by default 0
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType host available, initializing
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDevice host initialized, default device id 0, view FALSE, init type lazy
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType cuda not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType hip not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType sycl not available
[0] PetscInitialize_Common(): PETSc successfully started: number of processors = 1
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-kuDQvZEo.(none)
[0] PetscInitialize_Common(): Running on machine: GridapPETSc-against-kuDQvZEo
[0] PetscInitialize_Common(): BLAS: Environment number of OpenBLAS threads 1 given by OPENBLAS_NUM_THREADS
[0] PetscBLASSetNumThreads(): Setting number of threads used for OpenBLAS provided BLAS 1
[0] PetscCommDuplicate(): Duplicating a communicator 1140850689 -2080374784 max tags = 1073741823
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 20 unneeded,0 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 0
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 4)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 1 nodes of 4. Limit used: 5. Using Inode routines
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 14 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 1
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 14 unneeded,2 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 1
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 2)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 4 nodes out of 4 rows. Not using Inode routines
4×5 GridapPETSc.PETScMatrix:
 0.0  0.0  5.0  0.0  0.0
 0.0  0.0  0.0  0.0  0.0
 0.0  0.0  0.0  0.0  7.0
 0.0  0.0  0.0  0.0  0.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 3 X 2; storage space: 6 unneeded,0 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 0
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 3) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 1 nodes of 3. Limit used: 5. Using Inode routines
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 4; storage space: 0 unneeded,10 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 3
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 0)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 4 nodes out of 4 rows. Not using Inode routines
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] Petsc_OuterComm_Attr_DeleteFn(): Removing reference to PETSc communicator embedded in a user MPI_Comm -2080374784
[0] Petsc_InnerComm_Attr_DeleteFn(): User MPI_Comm 1140850689 is being unlinked from inner PETSc comm -2080374784
[0] PetscCommDestroy(): Deleting PETSc MPI_Comm -2080374784
[0] Petsc_Counter_Attr_DeleteFn(): Deleting counter data in an MPI_Comm -2080374784
[0] PetscFinalize(): PetscFinalize() called
8.240445 seconds (2.87 M allocations: 151.454 MiB, 8.16% gc time, 92.33% compilation time)
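Note: the `4×5 GridapPETSc.PETScMatrix` displays above come from round-tripping Julia arrays through PETSc objects; the "storage space: ... unneeded" and "mallocs during MatSetValues()" diagnostics record how much preallocation the incremental assembly wasted. A hedged sketch of the conversion path (the `convert` methods are assumed from GridapPETSc's array tests; exact constructors may differ):

    using GridapPETSc
    using SparseArrays

    GridapPETSc.with(args=String[]) do
      # sparse 4x5 matrix with the two entries shown in the display above
      A = sparse([1, 3], [3, 5], [5.0, 7.0], 4, 5)
      B = convert(PETScMatrix, A)   # assumed conversion; assembles a seqaij Mat in one shot
      display(B)
    end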
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
KSP Object: 1 MPI process
  type: gmres
    restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement
    happy breakdown tolerance 1e-30
  maximum iterations=10000, initial guess is zero
  tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
  left preconditioning
  using DEFAULT norm type for convergence test
PC Object: 1 MPI process
  type: jacobi
  PC has not been set up so information may be incomplete
    type DIAGONAL
  linear system matrix = precond matrix:
  Mat Object: 1 MPI process
    type: seqaij
    rows=4, cols=4
    total: nonzeros=10, allocated nonzeros=0
    total number of mallocs used during MatSetValues calls=0
      not using I-node routines
┌ Warning: 1 objects still not finalized before calling GridapPETSc.Finalize()
└ @ GridapPETSc ~/.julia/packages/GridapPETSc/l6eIU/src/Environment.jl:45
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
0 KSP Residual norm 2.000000000000e+00
1 KSP Residual norm 4.002966042487e-16
10.019217 seconds (5.59 M allocations: 296.321 MiB, 11.42% gc time, 88.44% compilation time: 16% of which was recompilation)
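Note: the `1 objects still not finalized before calling GridapPETSc.Finalize()` warning above means a PETSc object was still alive when the environment shut down; GridapPETSc destroys such objects through GC finalizers. A hedged sketch of the scoping discipline that avoids the warning (assumes finalizer-based cleanup and the conversion used in the matrix sketch above; details may differ):

    using GridapPETSc

    GridapPETSc.with(args=split("-ksp_type gmres -pc_type jacobi")) do
      v = convert(PETScVector, ones(4))   # assumed conversion to a PETSc Vec
      # ... use v ...
      v = nothing   # drop the last reference before the block returns
      GC.gc()       # let finalizers destroy the Vec while PETSc is still initialized
    end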
0 SNES Function norm 3.605551275464e+00
1 SNES Function norm 4.444444444444e-01
2 SNES Function norm 7.111111111111e-02
3 SNES Function norm 3.936947327951e-03
4 SNES Function norm 1.525925473445e-05
5 SNES Function norm 2.328306437081e-10
6 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 6
0 SNES Function norm 0.000000000000e+00
1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
0 SNES Function norm 0.000000000000e+00
1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
0 SNES Function norm 1.486606874732e-01
1 SNES Function norm 8.402777777778e-03
2 SNES Function norm 6.831067663990e-05
3 SNES Function norm 4.665073682466e-09
4 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 4
0 SNES Function norm 0.000000000000e+00
1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
0 SNES Function norm 0.000000000000e+00
1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
0 SNES Function norm 3.605551275464e+00
1 SNES Function norm 4.444444444444e-01
2 SNES Function norm 7.111111111111e-02
3 SNES Function norm 3.936947327951e-03
4 SNES Function norm 1.525925473445e-05
5 SNES Function norm 2.328306437081e-10
6 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 6
2.622981 seconds (1.36 M allocations: 70.678 MiB, 21.15% gc time, 78.25% compilation time)
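Note: the SNES traces above are Newton iterations driven through PETSc; in GridapPETSc the usual route is a Gridap `FEOperator` solved with `PETScNonlinearSolver`, with `-snes_monitor` producing the function norms. A hedged sketch on an illustrative semilinear problem (mesh, residual, and jacobian are invented for the example, not the test's actual problem):

    using Gridap
    using GridapPETSc

    options = "-snes_type newtonls -snes_monitor -snes_converged_reason"

    GridapPETSc.with(args=split(options)) do
      model = CartesianDiscreteModel((0, 1, 0, 1), (8, 8))
      reffe = ReferenceFE(lagrangian, Float64, 1)
      V = TestFESpace(model, reffe; dirichlet_tags="boundary")
      U = TrialFESpace(V, 0.0)
      Ω = Triangulation(model)
      dΩ = Measure(Ω, 2)

      # illustrative semilinear residual and its jacobian
      res(u, v) = ∫( ∇(v) ⋅ ∇(u) + v * u * u - v ) * dΩ
      jac(u, du, v) = ∫( ∇(v) ⋅ ∇(du) + 2 * v * u * du ) * dΩ

      op = FEOperator(res, jac, U, V)
      uh = solve(FESolver(PETScNonlinearSolver()), op)
    end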
[0] PetscDetermineInitialFPTrap(): Floating point trapping is off by default 0
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType host available, initializing
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDevice host initialized, default device id 0, view FALSE, init type lazy
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType cuda not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType hip not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType sycl not available
[0] PetscInitialize_Common(): PETSc successfully started: number of processors = 1
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-kuDQvZEo.(none)
[0] PetscInitialize_Common(): Running on machine: GridapPETSc-against-kuDQvZEo
[0] PetscInitialize_Common(): BLAS: Environment number of OpenBLAS threads 1 given by OPENBLAS_NUM_THREADS
[0] PetscBLASSetNumThreads(): Setting number of threads used for OpenBLAS provided BLAS 1
[0] PetscCommDuplicate(): Duplicating a communicator 1140850689 -2080374784 max tags = 1073741823
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 3; storage space: 2 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
4×3 GridapPETSc.PETScMatrix:
 -4.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 3; storage space: 11 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
4×3 GridapPETSc.PETScMatrix:
 -4.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 3; storage space: 0 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
4×3 GridapPETSc.PETScMatrix:
 -2.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
4-element GridapPETSc.PETScVector:
 6.0
 0.0
 0.0
 1.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
┌ Warning: 1 objects still not finalized before calling GridapPETSc.Finalize()
└ @ GridapPETSc ~/.julia/packages/GridapPETSc/l6eIU/src/Environment.jl:45
[0] PetscFinalize(): PetscFinalize() called
7.267877 seconds (2.44 M allocations: 129.545 MiB, 7.35% gc time, 91.41% compilation time)
0 KSP Residual norm 0.0467654
1 KSP Residual norm 0.015417
2 KSP Residual norm 0.00327706
3 KSP Residual norm 0.000661172
4 KSP Residual norm 0.000252898
5 KSP Residual norm 0.00014497
6 KSP Residual norm 7.44567e-05
7 KSP Residual norm 5.31437e-05
8 KSP Residual norm 3.10604e-05
9 KSP Residual norm 1.88474e-05
10 KSP Residual norm 1.67308e-05
11 KSP Residual norm 1.30481e-05
12 KSP Residual norm 8.92208e-06
13 KSP Residual norm 6.2216e-06
14 KSP Residual norm 5.78002e-06
15 KSP Residual norm 4.40273e-06
16 KSP Residual norm 4.00261e-06
17 KSP Residual norm 2.70432e-06
18 KSP Residual norm 2.34576e-06
19 KSP Residual norm 1.94042e-06
20 KSP Residual norm 1.75753e-06
21 KSP Residual norm 1.73547e-06
22 KSP Residual norm 1.62163e-06
23 KSP Residual norm 1.25495e-06
24 KSP Residual norm 1.01893e-06
25 KSP Residual norm 8.34647e-07
26 KSP Residual norm 6.48088e-07
27 KSP Residual norm 5.14953e-07
28 KSP Residual norm 4.21736e-07
29 KSP Residual norm 3.82565e-07
30 KSP Residual norm 3.11746e-07
31 KSP Residual norm 2.49033e-07
32 KSP Residual norm 2.33299e-07
33 KSP Residual norm 2.07937e-07
34 KSP Residual norm 1.80063e-07
35 KSP Residual norm 1.38409e-07
36 KSP Residual norm 1.20657e-07
37 KSP Residual norm 9.80194e-08
38 KSP Residual norm 7.25573e-08
39 KSP Residual norm 4.91734e-08
40 KSP Residual norm 3.75952e-08
41 KSP Residual norm 2.78836e-08
42 KSP Residual norm 2.23736e-08
43 KSP Residual norm 1.57428e-08
44 KSP Residual norm 1.24789e-08
45 KSP Residual norm 9.28278e-09
46 KSP Residual norm 6.0542e-09
47 KSP Residual norm 4.92096e-09
48 KSP Residual norm 3.61052e-09
49 KSP Residual norm 3.1137e-09
50 KSP Residual norm 1.99343e-09
51 KSP Residual norm 1.768e-09
52 KSP Residual norm 1.32989e-09
53 KSP Residual norm 9.447e-10
54 KSP Residual norm 6.254e-10
55 KSP Residual norm 4.207e-10
Linear solve converged due to CONVERGED_RTOL iterations 55
WARNING! There are options you set that were not used!
WARNING! could be spelling mistake, etc!
There is one unused database option. It is:
Option left: name:-mg_levels_esteig_ksp_type value: cg source: command line
182.157351 seconds (154.21 M allocations: 7.697 GiB, 3.19% gc time, 99.32% compilation time)
0 KSP Residual norm 1.000000000000e+01
1 KSP Residual norm 1.167900528050e-13
Linear solve converged due to CONVERGED_ITS iterations 1
193.622491 seconds (100.38 M allocations: 5.128 GiB, 1.42% gc time, 98.41% compilation time)
0 SNES Function norm 2.409822369424e+06
Linear solve converged due to CONVERGED_RTOL iterations 54
1 SNES Function norm 8.075649416004e+00
Linear solve converged due to CONVERGED_RTOL iterations 39
2 SNES Function norm 4.662170753064e-01
Linear solve converged due to CONVERGED_RTOL iterations 37
3 SNES Function norm 8.399998666394e-03
Linear solve converged due to CONVERGED_RTOL iterations 37
4 SNES Function norm 4.665302403823e-06
Linear solve converged due to CONVERGED_RTOL iterations 44
5 SNES Function norm 1.574127275597e-11
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5
0 SNES Function norm 2.409822369424e+06
Linear solve converged due to CONVERGED_RTOL iterations 54
1 SNES Function norm 8.075649416004e+00
Linear solve converged due to CONVERGED_RTOL iterations 39
2 SNES Function norm 4.662170753064e-01
Linear solve converged due to CONVERGED_RTOL iterations 37
3 SNES Function norm 8.399998666394e-03
Linear solve converged due to CONVERGED_RTOL iterations 37
4 SNES Function norm 4.665302403823e-06
Linear solve converged due to CONVERGED_RTOL iterations 44
5 SNES Function norm 1.574127275597e-11
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5
36.943609 seconds (17.27 M allocations: 907.054 MiB, 3.93% gc time, 94.49% compilation time: <1% of which was recompilation)
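Note: the `-mg_levels_esteig_ksp_type cg` option reported unused above, and the CG/GAMG solver views printed below, are all configured through PETSc's options database rather than code. A hedged sketch of an options string that produces a setup like the one shown (these are standard PETSc option names; the exact set passed by the test may differ):

    using GridapPETSc

    options = """
      -ksp_type cg -ksp_rtol 1.0e-10 -ksp_max_it 1000
      -ksp_norm_type unpreconditioned
      -ksp_monitor -ksp_converged_reason -ksp_view
      -pc_type gamg -pc_gamg_type agg
      -mg_levels_ksp_type chebyshev -mg_levels_pc_type jacobi
      """

    GridapPETSc.with(args=split(options)) do
      # assemble and solve as in the linear-solver sketch earlier in this log;
      # -ksp_view then prints the KSP/PC description seen below
    end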
0 KSP Residual norm 8.550000000000e-03
1 KSP Residual norm 8.968352924103e-04
2 KSP Residual norm 9.188071216687e-05
3 KSP Residual norm 7.483127648168e-06
4 KSP Residual norm 6.216337456339e-07
5 KSP Residual norm 5.262981415328e-08
6 KSP Residual norm 4.447870837457e-09
7 KSP Residual norm 3.660520650081e-10
8 KSP Residual norm 3.000628543014e-11
9 KSP Residual norm 2.545610900010e-12
10 KSP Residual norm 2.084997771778e-13
Linear solve converged due to CONVERGED_RTOL iterations 10
KSP Object: 1 MPI process
  type: cg
  maximum iterations=1000, initial guess is zero
  tolerances: relative=1e-10, absolute=1e-50, divergence=10000.
  left preconditioning
  using UNPRECONDITIONED norm type for convergence test
PC Object: 1 MPI process
  type: gamg
    type is MULTIPLICATIVE, levels=3 cycles=v
      Cycles per PCApply=1
      Using externally compute Galerkin coarse grid matrices
      GAMG specific options
        Threshold for dropping small values in graph on each level = -1. -1. -1.
        Threshold scaling factor for each level not specified = 1.
        AGG specific options
          Number of levels of aggressive coarsening 0
          MatCoarsen Object: (pc_gamg_) 1 MPI process
            type: misk
          Number smoothing steps to construct prolongation 1
        Complexity: grid = 1.11385 operator = 1.15878
        Per-level complexity: op = operator, int = interpolation
          #equations | #active PEs | avg nnz/row op | avg nnz/row int
                   7            1                7                  0
                  76            1               32                  4
                 729            1               22                  6
  Coarse grid solver -- level 0 -------------------------------
    KSP Object: (mg_coarse_) 1 MPI process
      type: preonly
      maximum iterations=10000, initial guess is zero
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_coarse_) 1 MPI process
      type: bjacobi
        number of blocks = 1
        Local solver information for first block is in the following KSP and PC objects on rank 0:
        Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks
      KSP Object: (mg_coarse_sub_) 1 MPI process
        type: preonly
        maximum iterations=1, initial guess is zero
        tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
        left preconditioning
        using NONE norm type for convergence test
      PC Object: (mg_coarse_sub_) 1 MPI process
        type: cholesky
          out-of-place factorization
          tolerance for zero pivot 2.22045e-14
          matrix ordering: nd
          factor fill ratio given 5., needed 1.
            Factored matrix follows:
              Mat Object: (mg_coarse_sub_) 1 MPI process
                type: seqsbaij
                rows=7, cols=7
                package used to perform factorization: petsc
                total: nonzeros=28, allocated nonzeros=28
                block size is 1
        linear system matrix = precond matrix:
        Mat Object: (mg_coarse_sub_) 1 MPI process
          type: seqaij
          rows=7, cols=7
          total: nonzeros=49, allocated nonzeros=49
          total number of mallocs used during MatSetValues calls=0
            using I-node routines: found 2 nodes, limit used is 5
      linear system matrix = precond matrix:
      Mat Object: (mg_coarse_sub_) 1 MPI process
        type: seqaij
        rows=7, cols=7
        total: nonzeros=49, allocated nonzeros=49
        total number of mallocs used during MatSetValues calls=0
          using I-node routines: found 2 nodes, limit used is 5
  Down solver (pre-smoother) on level 1 -------------------------------
    KSP Object: (mg_levels_1_) 1 MPI process
      type: chebyshev
        Chebyshev polynomial of first kind
        eigenvalue targets used: min 0.255929, max 2.81522
        eigenvalues provided (min 0.325601, max 2.55929) with transform: [0. 0.1; 0. 1.1]
      maximum iterations=2, nonzero initial guess
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_levels_1_) 1 MPI process
      type: jacobi
        type DIAGONAL
      linear system matrix = precond matrix:
      Mat Object: 1 MPI process
        type: seqaij
        rows=76, cols=76
        total: nonzeros=2432, allocated nonzeros=2432
        total number of mallocs used during MatSetValues calls=0
          not using I-node routines
  Up solver (post-smoother) same as down solver (pre-smoother)
  Down solver (pre-smoother) on level 2 -------------------------------
    KSP Object: (mg_levels_2_) 1 MPI process
      type: chebyshev
        Chebyshev polynomial of first kind
        eigenvalue targets used: min 0.143003, max 1.57304
        eigenvalues provided (min 0.109061, max 1.43003) with transform: [0. 0.1; 0. 1.1]
      maximum iterations=2, nonzero initial guess
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_levels_2_) 1 MPI process
      type: jacobi
        type DIAGONAL
      linear system matrix = precond matrix:
      Mat Object: 1 MPI process
        type: seqaij
        rows=729, cols=729
        total: nonzeros=15625, allocated nonzeros=0
        total number of mallocs used during MatSetValues calls=0
          not using I-node routines
  Up solver (post-smoother) same as down solver (pre-smoother)
  linear system matrix = precond matrix:
  Mat Object: 1 MPI process
    type: seqaij
    rows=729, cols=729
    total: nonzeros=15625, allocated nonzeros=0
    total number of mallocs used during MatSetValues calls=0
      not using I-node routines
0 KSP Residual norm 8.550000000000e-03
1 KSP Residual norm 8.968352924103e-04
2 KSP Residual norm 9.188071216687e-05
3 KSP Residual norm 7.483127648168e-06
4 KSP Residual norm 6.216337456339e-07
5 KSP Residual norm 5.262981415328e-08
6 KSP Residual norm 4.447870837457e-09
7 KSP Residual norm 3.660520650081e-10
8 KSP Residual norm 3.000628543014e-11
9 KSP Residual norm 2.545610900010e-12
10 KSP Residual norm 2.084997771778e-13
Linear solve converged due to CONVERGED_RTOL iterations 10
KSP Object: 1 MPI process
  type: cg
  maximum iterations=1000, initial guess is zero
  tolerances: relative=1e-10, absolute=1e-50, divergence=10000.
  left preconditioning
  using UNPRECONDITIONED norm type for convergence test
PC Object: 1 MPI process
  type: gamg
    type is MULTIPLICATIVE, levels=3 cycles=v
      Cycles per PCApply=1
      Using externally compute Galerkin coarse grid matrices
      GAMG specific options
        Threshold for dropping small values in graph on each level = -1. -1. -1.
        Threshold scaling factor for each level not specified = 1.
        AGG specific options
          Number of levels of aggressive coarsening 0
          MatCoarsen Object: (pc_gamg_) 1 MPI process
            type: misk
          Number smoothing steps to construct prolongation 1
        Complexity: grid = 1.11385 operator = 1.15878
        Per-level complexity: op = operator, int = interpolation
          #equations | #active PEs | avg nnz/row op | avg nnz/row int
                   7            1                7                  0
                  76            1               32                  4
                 729            1               22                  6
  Coarse grid solver -- level 0 -------------------------------
    KSP Object: (mg_coarse_) 1 MPI process
      type: preonly
      maximum iterations=10000, initial guess is zero
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_coarse_) 1 MPI process
      type: bjacobi
        number of blocks = 1
        Local solver information for first block is in the following KSP and PC objects on rank 0:
        Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks
      KSP Object: (mg_coarse_sub_) 1 MPI process
        type: preonly
        maximum iterations=1, initial guess is zero
        tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
        left preconditioning
        using NONE norm type for convergence test
      PC Object: (mg_coarse_sub_) 1 MPI process
        type: cholesky
          out-of-place factorization
          tolerance for zero pivot 2.22045e-14
          matrix ordering: nd
          factor fill ratio given 5., needed 1.
            Factored matrix follows:
              Mat Object: (mg_coarse_sub_) 1 MPI process
                type: seqsbaij
                rows=7, cols=7
                package used to perform factorization: petsc
                total: nonzeros=28, allocated nonzeros=28
                block size is 1
        linear system matrix = precond matrix:
        Mat Object: (mg_coarse_sub_) 1 MPI process
          type: seqaij
          rows=7, cols=7
          total: nonzeros=49, allocated nonzeros=49
          total number of mallocs used during MatSetValues calls=0
            using I-node routines: found 2 nodes, limit used is 5
      linear system matrix = precond matrix:
      Mat Object: (mg_coarse_sub_) 1 MPI process
        type: seqaij
        rows=7, cols=7
        total: nonzeros=49, allocated nonzeros=49
        total number of mallocs used during MatSetValues calls=0
          using I-node routines: found 2 nodes, limit used is 5
  Down solver (pre-smoother) on level 1 -------------------------------
    KSP Object: (mg_levels_1_) 1 MPI process
      type: chebyshev
        Chebyshev polynomial of first kind
        eigenvalue targets used: min 0.255929, max 2.81522
        eigenvalues provided (min 0.325601, max 2.55929) with transform: [0. 0.1; 0. 1.1]
      maximum iterations=2, nonzero initial guess
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_levels_1_) 1 MPI process
      type: jacobi
        type DIAGONAL
      linear system matrix = precond matrix:
      Mat Object: 1 MPI process
        type: seqaij
        rows=76, cols=76
        total: nonzeros=2432, allocated nonzeros=2432
        total number of mallocs used during MatSetValues calls=0
          not using I-node routines
  Up solver (post-smoother) same as down solver (pre-smoother)
  Down solver (pre-smoother) on level 2 -------------------------------
    KSP Object: (mg_levels_2_) 1 MPI process
      type: chebyshev
        Chebyshev polynomial of first kind
        eigenvalue targets used: min 0.143003, max 1.57304
        eigenvalues provided (min 0.109061, max 1.43003) with transform: [0. 0.1; 0. 1.1]
      maximum iterations=2, nonzero initial guess
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_levels_2_) 1 MPI process
      type: jacobi
        type DIAGONAL
      linear system matrix = precond matrix:
      Mat Object: 1 MPI process
        type: seqaij
        rows=729, cols=729
        total: nonzeros=15625, allocated nonzeros=0
        total number of mallocs used during MatSetValues calls=0
          not using I-node routines
  Up solver (post-smoother) same as down solver (pre-smoother)
  linear system matrix = precond matrix:
  Mat Object: 1 MPI process
    type: seqaij
    rows=729, cols=729
    total: nonzeros=15625, allocated nonzeros=0
    total number of mallocs used during MatSetValues calls=0
      not using I-node routines
0 KSP Residual norm 8.550000000000e-03
1 KSP Residual norm 8.968352924103e-04
2 KSP Residual norm 9.188071216687e-05
3 KSP Residual norm 7.483127648168e-06
4 KSP Residual norm 6.216337456339e-07
5 KSP Residual norm 5.262981415328e-08
6 KSP Residual norm 4.447870837457e-09
7 KSP Residual norm 3.660520650081e-10
8 KSP Residual norm 3.000628543014e-11
9 KSP Residual norm 2.545610900010e-12
10 KSP Residual norm 2.084997771778e-13
Linear solve converged due to CONVERGED_RTOL iterations 10
KSP Object: 1 MPI process
  type: cg
  maximum iterations=1000, initial guess is zero
  tolerances: relative=1e-10, absolute=1e-50, divergence=10000.
  left preconditioning
  using UNPRECONDITIONED norm type for convergence test
PC Object: 1 MPI process
  type: gamg
    type is MULTIPLICATIVE, levels=3 cycles=v
      Cycles per PCApply=1
      Using externally compute Galerkin coarse grid matrices
      GAMG specific options
        Threshold for dropping small values in graph on each level = -1. -1. -1.
Threshold scaling factor for each level not specified = 1. AGG specific options Number of levels of aggressive coarsening 0 MatCoarsen Object: (pc_gamg_) 1 MPI process type: misk Number smoothing steps to construct prolongation 1 Complexity: grid = 1.11385 operator = 1.15878 Per-level complexity: op = operator, int = interpolation #equations | #active PEs | avg nnz/row op | avg nnz/row int 7 1 7 0 76 1 32 4 729 1 22 6 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 1 MPI process type: preonly maximum iterations=10000, initial guess is zero tolerances: relative=1e-05, absolute=1e-50, divergence=10000. left preconditioning using NONE norm type for convergence test PC Object: (mg_coarse_) 1 MPI process type: bjacobi number of blocks = 1 Local solver information for first block is in the following KSP and PC objects on rank 0: Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks KSP Object: (mg_coarse_sub_) 1 MPI process type: preonly maximum iterations=1, initial guess is zero tolerances: relative=1e-05, absolute=1e-50, divergence=10000. left preconditioning using NONE norm type for convergence test PC Object: (mg_coarse_sub_) 1 MPI process type: cholesky out-of-place factorization tolerance for zero pivot 2.22045e-14 matrix ordering: nd factor fill ratio given 5., needed 1. Factored matrix follows: Mat Object: (mg_coarse_sub_) 1 MPI process type: seqsbaij rows=7, cols=7 package used to perform factorization: petsc total: nonzeros=28, allocated nonzeros=28 block size is 1 linear system matrix = precond matrix: Mat Object: (mg_coarse_sub_) 1 MPI process type: seqaij rows=7, cols=7 total: nonzeros=49, allocated nonzeros=49 total number of mallocs used during MatSetValues calls=0 using I-node routines: found 2 nodes, limit used is 5 linear system matrix = precond matrix: Mat Object: (mg_coarse_sub_) 1 MPI process type: seqaij rows=7, cols=7 total: nonzeros=49, allocated nonzeros=49 total number of mallocs used during MatSetValues calls=0 using I-node routines: found 2 nodes, limit used is 5 Down solver (pre-smoother) on level 1 ------------------------------- KSP Object: (mg_levels_1_) 1 MPI process type: chebyshev Chebyshev polynomial of first kind eigenvalue targets used: min 0.255929, max 2.81522 eigenvalues provided (min 0.325601, max 2.55929) with transform: [0. 0.1; 0. 1.1] maximum iterations=2, nonzero initial guess tolerances: relative=1e-05, absolute=1e-50, divergence=10000. left preconditioning using NONE norm type for convergence test PC Object: (mg_levels_1_) 1 MPI process type: jacobi type DIAGONAL linear system matrix = precond matrix: Mat Object: 1 MPI process type: seqaij rows=76, cols=76 total: nonzeros=2432, allocated nonzeros=2432 total number of mallocs used during MatSetValues calls=0 not using I-node routines Up solver (post-smoother) same as down solver (pre-smoother) Down solver (pre-smoother) on level 2 ------------------------------- KSP Object: (mg_levels_2_) 1 MPI process type: chebyshev Chebyshev polynomial of first kind eigenvalue targets used: min 0.143003, max 1.57304 eigenvalues provided (min 0.109061, max 1.43003) with transform: [0. 0.1; 0. 1.1] maximum iterations=2, nonzero initial guess tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
left preconditioning using NONE norm type for convergence test PC Object: (mg_levels_2_) 1 MPI process type: jacobi type DIAGONAL linear system matrix = precond matrix: Mat Object: 1 MPI process type: seqaij rows=729, cols=729 total: nonzeros=15625, allocated nonzeros=39304 total number of mallocs used during MatSetValues calls=0 not using I-node routines Up solver (post-smoother) same as down solver (pre-smoother) linear system matrix = precond matrix: Mat Object: 1 MPI process type: seqaij rows=729, cols=729 total: nonzeros=15625, allocated nonzeros=39304 total number of mallocs used during MatSetValues calls=0 not using I-node routines 0 KSP Residual norm 8.550000000000e-03 1 KSP Residual norm 8.968352924103e-04 2 KSP Residual norm 9.188071216687e-05 3 KSP Residual norm 7.483127648168e-06 4 KSP Residual norm 6.216337456339e-07 5 KSP Residual norm 5.262981415328e-08 6 KSP Residual norm 4.447870837457e-09 7 KSP Residual norm 3.660520650081e-10 8 KSP Residual norm 3.000628543014e-11 9 KSP Residual norm 2.545610900010e-12 10 KSP Residual norm 2.084997771778e-13 Linear solve converged due to CONVERGED_RTOL iterations 10 KSP Object: 1 MPI process type: cg maximum iterations=1000, initial guess is zero tolerances: relative=1e-10, absolute=1e-50, divergence=10000. left preconditioning using UNPRECONDITIONED norm type for convergence test PC Object: 1 MPI process type: gamg type is MULTIPLICATIVE, levels=3 cycles=v Cycles per PCApply=1 Using externally compute Galerkin coarse grid matrices GAMG specific options Threshold for dropping small values in graph on each level = -1. -1. -1. Threshold scaling factor for each level not specified = 1. AGG specific options Number of levels of aggressive coarsening 0 MatCoarsen Object: (pc_gamg_) 1 MPI process type: misk Number smoothing steps to construct prolongation 1 Complexity: grid = 1.11385 operator = 1.15878 Per-level complexity: op = operator, int = interpolation #equations | #active PEs | avg nnz/row op | avg nnz/row int 7 1 7 0 76 1 32 4 729 1 22 6 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 1 MPI process type: preonly maximum iterations=10000, initial guess is zero tolerances: relative=1e-05, absolute=1e-50, divergence=10000. left preconditioning using NONE norm type for convergence test PC Object: (mg_coarse_) 1 MPI process type: bjacobi number of blocks = 1 Local solver information for first block is in the following KSP and PC objects on rank 0: Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks KSP Object: (mg_coarse_sub_) 1 MPI process type: preonly maximum iterations=1, initial guess is zero tolerances: relative=1e-05, absolute=1e-50, divergence=10000. left preconditioning using NONE norm type for convergence test PC Object: (mg_coarse_sub_) 1 MPI process type: cholesky out-of-place factorization tolerance for zero pivot 2.22045e-14 matrix ordering: nd factor fill ratio given 5., needed 1. 
Factored matrix follows: Mat Object: (mg_coarse_sub_) 1 MPI process type: seqsbaij rows=7, cols=7 package used to perform factorization: petsc total: nonzeros=28, allocated nonzeros=28 block size is 1 linear system matrix = precond matrix: Mat Object: (mg_coarse_sub_) 1 MPI process type: seqaij rows=7, cols=7 total: nonzeros=49, allocated nonzeros=49 total number of mallocs used during MatSetValues calls=0 using I-node routines: found 2 nodes, limit used is 5 linear system matrix = precond matrix: Mat Object: (mg_coarse_sub_) 1 MPI process type: seqaij rows=7, cols=7 total: nonzeros=49, allocated nonzeros=49 total number of mallocs used during MatSetValues calls=0 using I-node routines: found 2 nodes, limit used is 5 Down solver (pre-smoother) on level 1 ------------------------------- KSP Object: (mg_levels_1_) 1 MPI process type: chebyshev Chebyshev polynomial of first kind eigenvalue targets used: min 0.255929, max 2.81522 eigenvalues provided (min 0.325601, max 2.55929) with transform: [0. 0.1; 0. 1.1] maximum iterations=2, nonzero initial guess tolerances: relative=1e-05, absolute=1e-50, divergence=10000. left preconditioning using NONE norm type for convergence test PC Object: (mg_levels_1_) 1 MPI process type: jacobi type DIAGONAL linear system matrix = precond matrix: Mat Object: 1 MPI process type: seqaij rows=76, cols=76 total: nonzeros=2432, allocated nonzeros=2432 total number of mallocs used during MatSetValues calls=0 not using I-node routines Up solver (post-smoother) same as down solver (pre-smoother) Down solver (pre-smoother) on level 2 ------------------------------- KSP Object: (mg_levels_2_) 1 MPI process type: chebyshev Chebyshev polynomial of first kind eigenvalue targets used: min 0.143003, max 1.57304 eigenvalues provided (min 0.109061, max 1.43003) with transform: [0. 0.1; 0. 1.1] maximum iterations=2, nonzero initial guess tolerances: relative=1e-05, absolute=1e-50, divergence=10000. left preconditioning using NONE norm type for convergence test PC Object: (mg_levels_2_) 1 MPI process type: jacobi type DIAGONAL linear system matrix = precond matrix: Mat Object: 1 MPI process type: seqaij rows=729, cols=729 total: nonzeros=15625, allocated nonzeros=39304 total number of mallocs used during MatSetValues calls=0 not using I-node routines Up solver (post-smoother) same as down solver (pre-smoother) linear system matrix = precond matrix: Mat Object: 1 MPI process type: seqaij rows=729, cols=729 total: nonzeros=15625, allocated nonzeros=39304 total number of mallocs used during MatSetValues calls=0 not using I-node routines WARNING! There are options you set that were not used! WARNING! could be spelling mistake, etc! There is one unused database option. It is: Option left: name:-mg_levels_esteig_ksp_type value: cg source: command line 18.950233 seconds (8.75 M allocations: 451.818 MiB, 6.10% gc time, 92.99% compilation time) Test Summary: | Pass Total Time SERIAL | 131 131 8m03.0s 483.353644 seconds (299.31 M allocations: 15.133 GiB, 3.14% gc time, 95.15% compilation time: <1% of which was recompilation) [1, 2, 3, 5, 6] [4, 5, 7, 3, 6] [6, 7, 4] Vec Object: 1 MPI process type: seq 10. 20. 30. 40. 50. 60. 70. 
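The unused-option warning above means that no PETSc object ever queried -mg_levels_esteig_ksp_type during the run. For reference, a minimal sketch (not the test suite's actual code) of how such command-line options reach PETSc through GridapPETSc; the option string here is a hypothetical example:

    # Minimal sketch, assuming GridapPETSc's `with` entry point for passing
    # PETSc command-line options. Any option that no KSP/PC/Mat object
    # consumes is reported at finalize time in a WARNING block like the one
    # above.
    using GridapPETSc
    options = "-ksp_type cg -ksp_monitor -ksp_converged_reason -ksp_view -pc_type gamg"
    GridapPETSc.with(args=split(options)) do
      # assemble and solve the linear system here
    end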
3-element PartitionedArrays.DebugArray{SparseArrays.SparseMatrixCSC{Float64, Int64}, 1}:
[1] = sparse([1, 2, 3, 1, 3], [1, 2, 3, 4, 5], [9.0, 9.0, 9.0, 1.0, 1.0], 5, 5)
[2] = sparse([1, 2, 2, 2, 1], [1, 2, 3, 4, 5], [9.0, 9.0, 1.0, 9.0, 1.0], 5, 5)
[3] = sparse([1, 2, 1], [1, 2, 3], [9.0, 9.0, 1.0], 3, 3)
3-element PartitionedArrays.DebugArray{SparseArrays.SparseMatrixCSC{Float64, Int64}, 1}:
[1] = sparse([1, 2, 3, 5, 4, 6, 1, 5, 3, 4, 6, 5, 7], [1, 2, 3, 3, 4, 4, 5, 5, 6, 6, 6, 7, 7], [9.0, 9.0, 9.0, 9.0, 9.0, 1.0, 1.0, 9.0, 1.0, 1.0, 9.0, 1.0, 9.0], 7, 7)
[2] = sparse(Int64[], Int64[], Float64[], 2, 2)
[3] = sparse(Int64[], Int64[], Float64[], 2, 2)
Mat Object: 1 MPI process
  type: seqaij
row 0: (0, 9.)  (4, 1.)
row 1: (1, 9.)
row 2: (2, 9.)  (5, 1.)
row 3: (3, 9.)  (5, 1.)
row 4: (2, 9.)  (4, 9.)  (6, 1.)
row 5: (3, 1.)  (5, 9.)
row 6: (6, 9.)
Linear solve converged due to CONVERGED_RTOL iterations 5
Vec Object: 1 MPI process
  type: seq
10. 20. 30. 40. 50. 60. 70.
Vec Object: 1 MPI process
  type: seq
140. 180. 330. 420. 790. 580. 630.
Linear solve converged due to CONVERGED_RTOL iterations 5
Vec Object: 1 MPI process
  type: seq
140. 180. 330. 420. 790. 580. 630.
Vec Object: 1 MPI process
  type: seq
10. 20. 30. 40. 50. 60. 70.
Vec Object: 1 MPI process
  type: seq
10. 20. 30. 40. 50. 60. 70.
Linear solve converged due to CONVERGED_RTOL iterations 5
Vec Object: 1 MPI process
  type: seq
10. 20. 30. 40. 50. 60. 70.
Vec Object: 1 MPI process
  type: seq
140. 180. 330. 420. 790. 580. 630.
Linear solve converged due to CONVERGED_RTOL iterations 5
Vec Object: 1 MPI process
  type: seq
140. 180. 330. 420. 790. 580. 630.
Vec Object: 1 MPI process
  type: seq
10. 20. 30. 40. 50. 60. 70.
Vec Object: 1 MPI process
  type: seq
10. 20. 30. 40. 50. 60. 70.
 98.974985 seconds (46.95 M allocations: 2.398 GiB, 2.25% gc time, 76.78% compilation time)
Linear solve converged due to CONVERGED_RTOL iterations 28
160.713057 seconds (84.61 M allocations: 4.278 GiB, 1.74% gc time, 93.77% compilation time: 2% of which was recompilation)
  0 SNES Function norm 2.409822369424e+06
  Linear solve converged due to CONVERGED_RTOL iterations 258
  1 SNES Function norm 8.077651079465e+00
  Linear solve converged due to CONVERGED_RTOL iterations 225
  2 SNES Function norm 4.661493250862e-01
  Linear solve converged due to CONVERGED_RTOL iterations 215
  3 SNES Function norm 8.397548297115e-03
  Linear solve converged due to CONVERGED_RTOL iterations 246
  4 SNES Function norm 4.669108164729e-06
  Linear solve converged due to CONVERGED_RTOL iterations 313
  5 SNES Function norm 4.635797857203e-11
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5
  0 SNES Function norm 4.635797857203e-11
  Linear solve converged due to CONVERGED_RTOL iterations 287
  1 SNES Function norm 5.122905122569e-13
Nonlinear solve converged due to CONVERGED_SNORM_RELATIVE iterations 1
  0 SNES Function norm 2.409822369424e+06
  Linear solve converged due to CONVERGED_RTOL iterations 258
  1 SNES Function norm 8.077651079465e+00
  Linear solve converged due to CONVERGED_RTOL iterations 225
  2 SNES Function norm 4.661493250862e-01
  Linear solve converged due to CONVERGED_RTOL iterations 215
  3 SNES Function norm 8.397548297118e-03
  Linear solve converged due to CONVERGED_RTOL iterations 246
  4 SNES Function norm 4.669108164798e-06
  Linear solve converged due to CONVERGED_RTOL iterations 313
  5 SNES Function norm 4.635773814233e-11
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5
  0 SNES Function norm 4.635773814233e-11
  Linear solve converged due to CONVERGED_RTOL iterations 287
  1 SNES Function norm 5.073718370359e-13
Nonlinear solve converged due to CONVERGED_SNORM_RELATIVE iterations 1
 79.465122 seconds (26.63 M allocations: 1.421 GiB, 2.55% gc time, 65.83% compilation time: <1% of which was recompilation)
Test Summary: | Pass  Total     Time
SEQUENTIAL    |   61     61  5m40.3s
340.331143 seconds (158.37 M allocations: 8.106 GiB, 2.07% gc time, 82.30% compilation time: 1% of which was recompilation)
┌ Warning: mpiexec() is deprecated, use the non-do-block form
│   caller = ip:0x0
└ @ Core :-1
[The following error was printed by each of the 3 MPI ranks of the PartitionedArrays job; the interleaved copies are collapsed into one.]
ERROR: LoadError: ArgumentError: Package Gridap [56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e] is required but does not seem to be installed:
 - Run `Pkg.instantiate()` to install all recorded dependencies.
Stacktrace:
  [1] __require_prelocked(pkg::Base.PkgId, env::String)
    @ Base ./loading.jl:2707
  [2] _require_prelocked(uuidkey::Base.PkgId, env::String)
    @ Base ./loading.jl:2585
  [3] macro expansion
    @ ./loading.jl:2513 [inlined]
  [4] macro expansion
    @ ./lock.jl:376 [inlined]
  [5] __require(into::Module, mod::Symbol)
    @ Base ./loading.jl:2477
  [6] require
    @ ./loading.jl:2453 [inlined]
  [7] eval_import_path
    @ ./module.jl:36 [inlined]
  [8] eval_import_path_all(at::Module, path::Expr, keyword::String)
    @ Base ./module.jl:60
  [9] _eval_using
    @ ./module.jl:137 [inlined]
 [10] _eval_using(to::Module, path::Expr)
    @ Base ./module.jl:137
 [11] top-level scope
    @ ~/.julia/packages/GridapPETSc/l6eIU/test/PartitionedArraysTests.jl:1
 [12] include(mapexpr::Function, mod::Module, _path::String)
    @ Base ./Base.jl:310
 [13] top-level scope
    @ ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/PartitionedArraysTests.jl:1
 [14] include(mod::Module, _path::String)
    @ Base ./Base.jl:309
 [15] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:344
 [16] _start()
    @ Base ./client.jl:577
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/PartitionedArraysTests.jl:1
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PartitionedArraysTests.jl:1
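The ArgumentError above is the root cause of every MPI failure that follows: mpiexec re-launches Julia with --project pointed at the package directory, and that project's recorded dependencies (Gridap, SparseMatricesCSR, ...) were never instantiated inside the PkgEval sandbox. A minimal sketch of the fix the message itself suggests, assuming the package path shown in the subprocess command lines below:

    # Minimal sketch: instantiate the project that the MPI workers run under.
    # The path is the one visible in the failed mpiexec command lines below.
    using Pkg
    Pkg.activate("/home/pkgeval/.julia/packages/GridapPETSc/l6eIU")
    Pkg.instantiate()   # installs Gridap, SparseMatricesCSR, etc.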
PartitionedArrays: Error During Test at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:9
  Got exception outside of a @test
  LoadError: failed process: Process(`/home/pkgeval/.julia/artifacts/117d9048128625bb3418b0b4cca48739c5d28740/bin/mpiexec -n 3 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --project=/home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/../.. /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PartitionedArraysTests.jl`, ProcessExited(1)) [1]
  Stacktrace:
    [1] pipeline_error
      @ ./process.jl:611 [inlined]
    [2] run(::Cmd; wait::Bool)
      @ Base ./process.jl:526
    [3] run
      @ ./process.jl:523 [inlined]
    [4] (::Main.GridapPETScTests.GridapPETScMPITests.PartitionedArraysTestsRun.var"#3#4"{Int64, String, String, String})(cmd::String)
      @ Main.GridapPETScTests.GridapPETScMPITests.PartitionedArraysTestsRun ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/mpiexec.jl:11
    [5] top-level scope
      @ ~/.julia/packages/JLLWrappers/m2Pjh/src/runtime.jl:49
    [6] withenv(::JLLWrappers.var"#withenv_executable_wrapper##0#withenv_executable_wrapper##1"{Main.GridapPETScTests.GridapPETScMPITests.PartitionedArraysTestsRun.var"#3#4"{Int64, String, String, String}, String}, ::Pair{String, String}, ::Vararg{Pair{String, String}})
      @ Base ./env.jl:265
    [7] top-level scope
      @ ~/.julia/packages/JLLWrappers/m2Pjh/src/runtime.jl:48
    [8] #mpiexec#1
      @ ~/.julia/packages/JLLWrappers/m2Pjh/src/products/executable_generators.jl:28 [inlined]
    [9] mpiexec
      @ ~/.julia/packages/JLLWrappers/m2Pjh/src/products/executable_generators.jl:25 [inlined]
   [10] run_mpi_driver(; procs::Int64, file::String)
      @ Main.GridapPETScTests.GridapPETScMPITests.PartitionedArraysTestsRun ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/mpiexec.jl:7
   [11] kwcall(::@NamedTuple{procs::Int64, file::String}, ::typeof(Main.GridapPETScTests.GridapPETScMPITests.PartitionedArraysTestsRun.run_mpi_driver))
      @ Main.GridapPETScTests.GridapPETScMPITests.PartitionedArraysTestsRun ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/mpiexec.jl:3
   [12] top-level scope
      @ ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/PartitionedArraysTestsRun.jl:3
   [13] include(mapexpr::Function, mod::Module, _path::String)
      @ Base ./Base.jl:310
   [14] IncludeInto
      @ ./Base.jl:311 [inlined]
   [15] macro expansion
      @ ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:9 [inlined]
   [16] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [17] macro expansion
      @ ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:9 [inlined]
   [18] macro expansion
      @ ./timing.jl:730 [inlined]
   [19] top-level scope
      @ ~/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:394
   [20] include(mapexpr::Function, mod::Module, _path::String)
      @ Base ./Base.jl:310
   [21] IncludeInto
      @ ./Base.jl:311 [inlined]
   [22] macro expansion
      @ ~/.julia/packages/GridapPETSc/l6eIU/test/runtests.jl:7 [inlined]
   [23] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [24] macro expansion
      @ ~/.julia/packages/GridapPETSc/l6eIU/test/runtests.jl:7 [inlined]
   [25] macro expansion
      @ ./timing.jl:730 [inlined]
   [26] top-level scope
      @ ~/.julia/packages/GridapPETSc/l6eIU/test/runtests.jl:394
   [27] include(mapexpr::Function, mod::Module, _path::String)
      @ Base ./Base.jl:310
   [28] top-level scope
      @ none:6
   [29] eval(m::Module, e::Any)
      @ Core ./boot.jl:489
   [30] exec_options(opts::Base.JLOptions)
      @ Base ./client.jl:310
   [31] _start()
      @ Base ./client.jl:577
  in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PartitionedArraysTestsRun.jl:1
 26.434612 seconds (2.12 M allocations: 113.406 MiB, 19.42% compilation time: <1% of which was recompilation)
[The following error was printed by each of the 2 MPI ranks of the PLaplacianTests job; the interleaved copies are collapsed into one.]
ERROR: LoadError: ArgumentError: Package Gridap [56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e] is required but does not seem to be installed:
 - Run `Pkg.instantiate()` to install all recorded dependencies.
[stack trace identical to the rank error above, with the failing `using` at test/PLaplacianTests.jl:1, included from test/mpi/PLaplacianTests.jl:1]
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/PLaplacianTests.jl:1
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PLaplacianTests.jl:1
PLaplacianTests: Error During Test at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:10
  Got exception outside of a @test
  LoadError: failed process: Process(`/home/pkgeval/.julia/artifacts/117d9048128625bb3418b0b4cca48739c5d28740/bin/mpiexec -n 2 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --project=/home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/../.. /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PLaplacianTests.jl`, ProcessExited(1)) [1]
  [stack trace identical to the PartitionedArrays one above, via PLaplacianTestsRun and test/mpi/runtests.jl:10]
  in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PLaplacianTestsRun.jl:1
 14.696578 seconds (66.28 k allocations: 3.680 MiB, 1.50% compilation time)
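Frames [4]-[10] of the traces above show the shape of the MPI driver: run_mpi_driver(procs, file) obtains mpiexec from the MPI JLL and re-launches Julia under it with the package's own project. A rough reconstruction under stated assumptions (the JLL's do-block mpiexec form, which the deprecation warning above refers to); this is not the package's actual source:

    # Rough sketch of the driver pattern in test/mpi/mpiexec.jl; names and
    # flags are reconstructed from the stack trace, not copied from source.
    using MPICH_jll: mpiexec
    function run_mpi_driver(; procs::Int, file::String)
        project = joinpath(@__DIR__, "..", "..")
        mpiexec() do cmd   # do-block form; the log warns this form is deprecated
            run(`$cmd -n $procs $(Base.julia_cmd()) --project=$project $file`)
        end
    end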
[The following error was printed by each of the 4 MPI ranks of the GCTests job; the interleaved copies are collapsed into one.]
ERROR: LoadError: ArgumentError: Package Gridap [56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e] is required but does not seem to be installed:
 - Run `Pkg.instantiate()` to install all recorded dependencies.
[stack trace identical to the rank error above, with the failing `using` at test/PLaplacianTests.jl:1, included from test/mpi/GCTests.jl:1]
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/PLaplacianTests.jl:1
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/GCTests.jl:1
GCTests: Error During Test at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:11
  Got exception outside of a @test
  LoadError: failed process: Process(`/home/pkgeval/.julia/artifacts/117d9048128625bb3418b0b4cca48739c5d28740/bin/mpiexec -n 4 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --project=/home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/../.. /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/GCTests.jl`, ProcessExited(1)) [1]
  [stack trace identical to the PartitionedArrays one above, via GCTestsRun and test/mpi/runtests.jl:11]
  in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/GCTestsRun.jl:1
 28.494790 seconds (66.27 k allocations: 3.678 MiB, 0.78% compilation time)
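Each "failed process" error above comes from Base's pipeline_error (frames [1]-[3]): run throws when the child process exits non-zero. A tiny self-contained illustration of that behaviour; `cmd` here is just a stand-in for the mpiexec command in the log:

    # `run` throws ProcessFailedException on a non-zero exit status, while
    # `success` simply returns false. `false` is any command exiting with 1.
    cmd = `false`
    success(cmd)             # -> false, no exception
    try
        run(cmd)             # -> throws; this is what run_mpi_driver hits
    catch err
        println(typeof(err)) # ProcessFailedException
    end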
[The following error was printed by each of the 4 MPI ranks of the PoissonTests job; the interleaved copies are collapsed into one.]
ERROR: LoadError: ArgumentError: Package SparseMatricesCSR [a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1] is required but does not seem to be installed:
 - Run `Pkg.instantiate()` to install all recorded dependencies.
[stack trace identical to the rank error above, with the failing `using` at test/PoissonTests.jl:1, included from test/mpi/PoissonTests.jl:1]
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/PoissonTests.jl:1
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PoissonTests.jl:1
PoissonTests: Error During Test at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:12
  Got exception outside of a @test
  LoadError: failed process: Process(`/home/pkgeval/.julia/artifacts/117d9048128625bb3418b0b4cca48739c5d28740/bin/mpiexec -n 4 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --project=/home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/../.. /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PoissonTests.jl`, ProcessExited(1)) [1]
  [stack trace identical to the PartitionedArrays one above, via PoissonTestsRun and test/mpi/runtests.jl:12]
  in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/PoissonTestsRun.jl:1
 28.900594 seconds (66.27 k allocations: 3.677 MiB, 0.78% compilation time)
[The following error was printed by each of the 4 MPI ranks of the DarcyTests job; the interleaved copies are collapsed into one.]
ERROR: LoadError: ArgumentError: Package Gridap [56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e] is required but does not seem to be installed:
 - Run `Pkg.instantiate()` to install all recorded dependencies.
[stack trace identical to the rank error above, with the failing `using` at test/DarcyTests.jl:1, included from test/mpi/DarcyTests.jl:1]
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/DarcyTests.jl:1
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/DarcyTests.jl:1
DarcyTests: Error During Test at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/runtests.jl:13
  Got exception outside of a @test
  LoadError: failed process: Process(`/home/pkgeval/.julia/artifacts/117d9048128625bb3418b0b4cca48739c5d28740/bin/mpiexec -n 4 /opt/julia/bin/julia -C native -J/opt/julia/lib/julia/sys.so --depwarn=yes --check-bounds=yes --pkgimages=existing -g1 --startup-file=no --project=/home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/../.. /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/DarcyTests.jl`, ProcessExited(1)) [1]
  [stack trace identical to the PartitionedArrays one above, via DarcyTestsRun and test/mpi/runtests.jl:13]
  in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/mpi/DarcyTestsRun.jl:1
 28.762750 seconds (66.28 k allocations: 3.678 MiB, 0.75% compilation time)
Test Summary:       | Error  Total     Time
MPI                 |     5      5  2m09.0s
  PartitionedArrays |     1      1    26.4s
  PLaplacianTests   |     1      1    14.7s
  GCTests           |     1      1    28.5s
  PoissonTests      |     1      1    28.9s
  DarcyTests        |     1      1    28.8s
RNG of the outermost testset: Random.Xoshiro(0x690f24f6105ea502, 0x41254c02aef19356, 0x579bc1464dfb0647, 0x170889a372054d86, 0x83c592d5c13f9d11)
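Every testset in the summary above is counted as an Error rather than a Fail: the subprocess exception escaped the enclosing @testset instead of being caught by an @test assertion. A generic illustration with the Test stdlib (not the package's own tests):

    # An exception that escapes a @testset is recorded as "Error ... Got
    # exception outside of a @test", producing a summary like the one above.
    using Test
    @testset "MPI" begin
        @testset "SomeMPIJob" begin
            error("mpiexec job failed")
        end
    end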
ERROR: LoadError: Some tests did not pass: 0 passed, 0 failed, 5 errored, 0 broken.
in expression starting at /home/pkgeval/.julia/packages/GridapPETSc/l6eIU/test/runtests.jl:1
Testing failed after 916.62s
ERROR: LoadError: Package GridapPETSc errored during testing
Stacktrace:
  [1] pkgerror(msg::String)
    @ Pkg.Types /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Types.jl:68
  [2] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, julia_args::Cmd, test_args::Cmd, test_fn::Nothing, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool)
    @ Pkg.Operations /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Operations.jl:2946
  [3] test
    @ /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Operations.jl:2795 [inlined]
  [4] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, test_fn::Nothing, julia_args::Cmd, test_args::Cmd, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool, kwargs::@Kwargs{io::IOContext{IO}})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:572
  [5] kwcall(::@NamedTuple{julia_args::Cmd, io::IOContext{IO}}, ::typeof(Pkg.API.test), ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:548
  [6] test(pkgs::Vector{PackageSpec}; io::IOContext{IO}, kwargs::@Kwargs{julia_args::Cmd})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:172
  [7] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkgs::Vector{PackageSpec})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:161
  [8] test(pkgs::Vector{String}; kwargs::@Kwargs{julia_args::Cmd})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:160
  [9] test
    @ /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:160 [inlined]
 [10] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkg::String)
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:159
 [11] top-level scope
    @ /PkgEval.jl/scripts/evaluate.jl:219
 [12] include(mod::Module, _path::String)
    @ Base ./Base.jl:309
 [13] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:344
 [14] _start()
    @ Base ./client.jl:577
in expression starting at /PkgEval.jl/scripts/evaluate.jl:210
PkgEval failed after 1538.47s: package tests unexpectedly errored
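The final trace shows PkgEval driving the standard Pkg test machinery with extra julia_args (the `--check-bounds=yes --depwarn=yes` flags visible on the worker command lines earlier). A sketch of the equivalent invocation:

    # Sketch of the Pkg call implied by frames [4]-[10] of the final trace;
    # the flags are taken from the subprocess command lines in this log.
    using Pkg
    Pkg.test("GridapPETSc"; julia_args=`--check-bounds=yes --depwarn=yes`)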