Package evaluation of GridapPETSc on Julia 1.13.0-DEV.1200 (a5576b4ddb*) started at 2025-09-28T23:03:12.757

################################################################################
# Set-up
# Installing PkgEval dependencies (TestEnv)...

Set-up completed after 10.95s

################################################################################
# Installation
# Installing GridapPETSc...

Resolving package versions...
Installed LightXML ───────────────── v0.9.2
Installed DiffResults ────────────── v1.1.0
Installed MacroTools ─────────────── v0.5.16
Installed FillArrays ─────────────── v1.14.0
Installed DataStructures ─────────── v0.19.1
Installed ForwardDiff ────────────── v1.2.1
Installed MPI ────────────────────── v0.20.23
Installed JLD2 ───────────────────── v0.5.15
Installed PkgVersion ─────────────── v0.3.3
Installed AbstractTrees ──────────── v0.4.5
Installed MKL_jll ────────────────── v2025.2.0+0
Installed SimpleUnPack ───────────── v1.1.0
Installed Adapt ──────────────────── v4.4.0
Installed MPICH_jll ──────────────── v4.3.1+0
Installed ConstructionBase ───────── v1.6.0
Installed PolynomialBases ────────── v0.4.25
Installed oneTBB_jll ─────────────── v2022.0.0+0
Installed PETSc_jll ──────────────── v3.22.0+0
Installed CodecZlib ──────────────── v0.7.8
Installed MicrosoftMPI_jll ───────── v10.1.4+3
Installed ArrayInterface ─────────── v7.20.0
Installed TranscodingStreams ─────── v0.11.3
Installed OrderedCollections ────── v1.8.1
Installed HashArrayMappedTries ───── v0.2.0
Installed NearestNeighbors ───────── v0.4.22
Installed RecipesBase ────────────── v1.3.4
Installed Combinatorics ──────────── v1.0.3
Installed IntelOpenMP_jll ────────── v2025.2.0+0
Installed StatsAPI ───────────────── v1.7.1
Installed Statistics ─────────────── v1.11.1
Installed OffsetArrays ───────────── v1.17.0
Installed CommonSubexpressions ───── v0.3.1
Installed PrecompileTools ────────── v1.3.3
Installed FileIO ─────────────────── v1.17.0
Installed PartitionedArrays ──────── v0.3.5
Installed ArgCheck ───────────────── v2.5.0
Installed StaticArraysCore ───────── v1.4.3
Installed StaticArrays ───────────── v1.9.15
Installed AbstractFFTs ───────────── v1.5.0
Installed Distances ──────────────── v0.10.12
Installed IrrationalConstants ────── v0.2.4
Installed BSON ───────────────────── v0.3.9
Installed FastGaussQuadrature ────── v1.0.2
Installed SCALAPACK32_jll ────────── v2.2.1+1
Installed SparseMatricesCSR ──────── v0.6.9
Installed NaNMath ────────────────── v1.1.3
Installed QuadGK ─────────────────── v2.11.2
Installed OpenSpecFun_jll ────────── v0.5.6+0
Installed Requires ───────────────── v1.3.1
Installed ArrayLayouts ───────────── v1.11.2
Installed NLsolve ────────────────── v4.5.1
Installed OpenMPI_jll ────────────── v5.0.8+0
Installed OpenBLAS32_jll ─────────── v0.3.29+0
Installed DiffRules ──────────────── v1.15.1
Installed LogExpFunctions ────────── v0.3.29
Installed JSON ───────────────────── v0.21.4
Installed Parsers ────────────────── v2.8.3
Installed AutoHashEquals ─────────── v2.2.0
Installed Gridap ─────────────────── v0.19.5
Installed XML2_jll ───────────────── v2.14.4+0
Installed ScopedValues ───────────── v1.5.0
Installed Parameters ─────────────── v0.12.3
Installed NLSolversBase ──────────── v7.10.0
Installed Hwloc_jll ──────────────── v2.12.1+0
Installed FiniteDiff ─────────────── v2.28.1
Installed SpecialFunctions ───────── v2.5.1
Installed BlockArrays ────────────── v1.7.2
Installed MPIPreferences ─────────── v0.1.11
Installed Reexport ───────────────── v1.2.2
Installed FFTW_jll ───────────────── v3.3.11+0
Installed Setfield ───────────────── v1.1.2
Installed FFTW ───────────────────── v1.10.0
Installed Preferences ────────────── v1.5.0
Installed IterativeSolvers ───────── v0.9.4
Installed Libiconv_jll ───────────── v1.18.0+0
Installed JLLWrappers ────────────── v1.7.1
Installed MPItrampoline_jll ──────── v5.5.4+0
Installed WriteVTK ───────────────── v1.21.2
Installed LineSearches ───────────── v7.4.0
Installed GridapPETSc ────────────── v0.5.6
Installed VTKBase ────────────────── v1.0.1
Installed GridapDistributed ──────── v0.4.9
Installed UnPack ─────────────────── v1.0.2
Installed ADTypes ────────────────── v1.18.0
Installed DocStringExtensions ────── v0.9.5
Installed CircularArrays ─────────── v1.4.0
Installed DifferentiationInterface ─ v0.7.7
Installing 10 artifacts
Installed artifact OpenSpecFun 194.9 KiB
Installed artifact oneTBB 435.6 KiB
Installed artifact SCALAPACK32 2.3 MiB
Installed artifact Libiconv 1.9 MiB
Installed artifact XML2 2.5 MiB
Installed artifact FFTW 2.2 MiB
Installed artifact Hwloc 3.5 MiB
Installed artifact MPICH 3.3 MiB
Installed artifact OpenBLAS32 10.0 MiB
Installed artifact PETSc 236.3 MiB
Updating `~/.julia/environments/v1.13/Project.toml`
  [bcdc36c2] + GridapPETSc v0.5.6
Updating `~/.julia/environments/v1.13/Manifest.toml`
  [47edcb42] + ADTypes v1.18.0
  [621f4979] + AbstractFFTs v1.5.0
  [1520ce14] + AbstractTrees v0.4.5
  [79e6a3ab] + Adapt v4.4.0
  [dce04be8] + ArgCheck v2.5.0
  [4fba245c] + ArrayInterface v7.20.0
  [4c555306] + ArrayLayouts v1.11.2
  [15f4f7f2] + AutoHashEquals v2.2.0
  [fbb218c0] + BSON v0.3.9
  [8e7c35d0] + BlockArrays v1.7.2
  [7a955b69] + CircularArrays v1.4.0
  [944b1d66] + CodecZlib v0.7.8
  [861a8166] + Combinatorics v1.0.3
  [bbf7d656] + CommonSubexpressions v0.3.1
  [187b0558] + ConstructionBase v1.6.0
  [864edb3b] + DataStructures v0.19.1
  [163ba53b] + DiffResults v1.1.0
  [b552c78f] + DiffRules v1.15.1
  [a0c0ee7d] + DifferentiationInterface v0.7.7
  [b4f34e82] + Distances v0.10.12
  [ffbed154] + DocStringExtensions v0.9.5
  [7a1cc6ca] + FFTW v1.10.0
  [442a2c76] + FastGaussQuadrature v1.0.2
  [5789e2e9] + FileIO v1.17.0
  [1a297f60] + FillArrays v1.14.0
  [6a86dc24] + FiniteDiff v2.28.1
  [f6369f11] + ForwardDiff v1.2.1
  [56d4f2e9] + Gridap v0.19.5
  [f9701e48] + GridapDistributed v0.4.9
  [bcdc36c2] + GridapPETSc v0.5.6
  [076d061b] + HashArrayMappedTries v0.2.0
  [92d709cd] + IrrationalConstants v0.2.4
  [42fd0dbc] + IterativeSolvers v0.9.4
⌅ [033835bb] + JLD2 v0.5.15
  [692b3bcd] + JLLWrappers v1.7.1
  [682c06a0] + JSON v0.21.4
  [9c8b4983] + LightXML v0.9.2
  [d3d80556] + LineSearches v7.4.0
  [2ab3a3ac] + LogExpFunctions v0.3.29
  [da04e1cc] + MPI v0.20.23
  [3da0fdf6] + MPIPreferences v0.1.11
  [1914dd2f] + MacroTools v0.5.16
  [d41bc354] + NLSolversBase v7.10.0
  [2774e3e8] + NLsolve v4.5.1
  [77ba4419] + NaNMath v1.1.3
  [b8a86587] + NearestNeighbors v0.4.22
  [6fe1bfb0] + OffsetArrays v1.17.0
  [bac558e1] + OrderedCollections v1.8.1
  [d96e819e] + Parameters v0.12.3
  [69de0a69] + Parsers v2.8.3
⌅ [5a9dfac6] + PartitionedArrays v0.3.5
  [eebad327] + PkgVersion v0.3.3
  [c74db56a] + PolynomialBases v0.4.25
  [aea7be01] + PrecompileTools v1.3.3
  [21216c6a] + Preferences v1.5.0
  [1fd47b50] + QuadGK v2.11.2
  [3cdcf5f2] + RecipesBase v1.3.4
  [189a3867] + Reexport v1.2.2
  [ae029012] + Requires v1.3.1
  [7e506255] + ScopedValues v1.5.0
  [efcf1570] + Setfield v1.1.2
  [ce78b400] + SimpleUnPack v1.1.0
  [a0a7dd2c] + SparseMatricesCSR v0.6.9
  [276daf66] + SpecialFunctions v2.5.1
  [90137ffa] + StaticArrays v1.9.15
  [1e83bf80] + StaticArraysCore v1.4.3
  [10745b16] + Statistics v1.11.1
  [82ae8749] + StatsAPI v1.7.1
  [3bb67fe8] + TranscodingStreams v0.11.3
  [3a884ed6] + UnPack v1.0.2
  [4004b06d] + VTKBase v1.0.1
  [64499a7a] + WriteVTK v1.21.2
  [f5851436] + FFTW_jll v3.3.11+0
⌃ [e33a78d0] + Hwloc_jll v2.12.1+0
  [1d5cc7b8] + IntelOpenMP_jll v2025.2.0+0
  [94ce4f54] + Libiconv_jll v1.18.0+0
  [856f044c] + MKL_jll v2025.2.0+0
  [7cb0a576] + MPICH_jll v4.3.1+0
  [f1f71cc9] + MPItrampoline_jll v5.5.4+0
  [9237b28f] + MicrosoftMPI_jll v10.1.4+3
  [656ef2d0] + OpenBLAS32_jll v0.3.29+0
  [fe0851c0] + OpenMPI_jll v5.0.8+0
  [efe28fd5] + OpenSpecFun_jll v0.5.6+0
  [8fa3689e] + PETSc_jll v3.22.0+0
  [aabda75e] + SCALAPACK32_jll v2.2.1+1
  [02c8fc9c] + XML2_jll v2.14.4+0
  [1317d2d5] + oneTBB_jll v2022.0.0+0
  [0dad84c5] + ArgTools v1.1.2
  [56f22d72] + Artifacts v1.11.0
  [2a0f44e3] + Base64 v1.11.0
  [ade2ca70] + Dates v1.11.0
  [8ba89e20] + Distributed v1.11.0
  [f43a241f] + Downloads v1.7.0
  [7b1f6079] + FileWatching v1.11.0
  [9fa8497b] + Future v1.11.0
  [b77e0a4c] + InteractiveUtils v1.11.0
  [ac6e5ff7] + JuliaSyntaxHighlighting v1.12.0
  [4af54fe1] + LazyArtifacts v1.11.0
  [b27032c2] + LibCURL v0.6.4
  [76f85450] + LibGit2 v1.11.0
  [8f399da3] + Libdl v1.11.0
  [37e2e46d] + LinearAlgebra v1.13.0
  [56ddb016] + Logging v1.11.0
  [d6f4376e] + Markdown v1.11.0
  [a63ad114] + Mmap v1.11.0
  [ca575930] + NetworkOptions v1.3.0
  [44cfe95a] + Pkg v1.13.0
  [de0858da] + Printf v1.11.0
  [9a3f8284] + Random v1.11.0
  [ea8e919c] + SHA v0.7.0
  [9e88b42a] + Serialization v1.11.0
  [6462fe0b] + Sockets v1.11.0
  [2f01184e] + SparseArrays v1.13.0
  [f489334b] + StyledStrings v1.11.0
  [4607b0f0] + SuiteSparse
  [fa267f1f] + TOML v1.0.3
  [a4e569a6] + Tar v1.10.0
  [8dfed614] + Test v1.11.0
  [cf7118a7] + UUIDs v1.11.0
  [4ec0a83e] + Unicode v1.11.0
  [e66e0078] + CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] + LibCURL_jll v8.16.0+0
  [e37daf67] + LibGit2_jll v1.9.1+0
  [29816b5a] + LibSSH2_jll v1.11.3+1
  [14a3606d] + MozillaCACerts_jll v2025.9.9
  [4536629a] + OpenBLAS_jll v0.3.29+0
  [05823500] + OpenLibm_jll v0.8.7+0
  [458c3c95] + OpenSSL_jll v3.5.2+0
  [efcefdf7] + PCRE2_jll v10.46.0+0
  [bea87d4a] + SuiteSparse_jll v7.10.1+0
  [83775a58] + Zlib_jll v1.3.1+2
  [3161d3a3] + Zstd_jll v1.5.7+1
  [8e850b90] + libblastrampoline_jll v5.13.1+0
  [8e850ede] + nghttp2_jll v1.67.1+0
  [3f19e933] + p7zip_jll v17.6.0+0
Info Packages marked with ⌃ and ⌅ have new versions available. Those with ⌃ may be upgradable, but those with ⌅ are restricted by compatibility constraints from upgrading. To see why use `status --outdated -m`
Building GridapPETSc → `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/aeb0905e921f122e11669e3f10faa318687aa3e0/build.log`
Installation completed after 81.62s

################################################################################
# Precompilation
# Precompiling PkgEval dependencies...

Precompiling packages...
  4592.5 ms  ✓ TestEnv
  1 dependency successfully precompiled in 5 seconds. 27 already precompiled.
Precompiling package dependencies...
Precompilation completed after 459.37s

################################################################################
# Testing
# Testing GridapPETSc

Status `/tmp/jl_XfJMG7/Project.toml`
  [56d4f2e9] Gridap v0.19.5
  [f9701e48] GridapDistributed v0.4.9
  [bcdc36c2] GridapPETSc v0.5.6
  [da04e1cc] MPI v0.20.23
⌅ [5a9dfac6] PartitionedArrays v0.3.5
  [a0a7dd2c] SparseMatricesCSR v0.6.9
  [8fa3689e] PETSc_jll v3.22.0+0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.13.0
  [9a3f8284] Random v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [8dfed614] Test v1.11.0
Status `/tmp/jl_XfJMG7/Manifest.toml`
  [47edcb42] ADTypes v1.18.0
  [621f4979] AbstractFFTs v1.5.0
  [1520ce14] AbstractTrees v0.4.5
  [79e6a3ab] Adapt v4.4.0
  [dce04be8] ArgCheck v2.5.0
  [4fba245c] ArrayInterface v7.20.0
  [4c555306] ArrayLayouts v1.11.2
  [15f4f7f2] AutoHashEquals v2.2.0
  [fbb218c0] BSON v0.3.9
  [8e7c35d0] BlockArrays v1.7.2
  [7a955b69] CircularArrays v1.4.0
  [944b1d66] CodecZlib v0.7.8
  [861a8166] Combinatorics v1.0.3
  [bbf7d656] CommonSubexpressions v0.3.1
  [187b0558] ConstructionBase v1.6.0
  [864edb3b] DataStructures v0.19.1
  [163ba53b] DiffResults v1.1.0
  [b552c78f] DiffRules v1.15.1
  [a0c0ee7d] DifferentiationInterface v0.7.7
  [b4f34e82] Distances v0.10.12
  [ffbed154] DocStringExtensions v0.9.5
  [7a1cc6ca] FFTW v1.10.0
  [442a2c76] FastGaussQuadrature v1.0.2
  [5789e2e9] FileIO v1.17.0
  [1a297f60] FillArrays v1.14.0
  [6a86dc24] FiniteDiff v2.28.1
  [f6369f11] ForwardDiff v1.2.1
  [56d4f2e9] Gridap v0.19.5
  [f9701e48] GridapDistributed v0.4.9
  [bcdc36c2] GridapPETSc v0.5.6
  [076d061b] HashArrayMappedTries v0.2.0
  [92d709cd] IrrationalConstants v0.2.4
  [42fd0dbc] IterativeSolvers v0.9.4
⌅ [033835bb] JLD2 v0.5.15
  [692b3bcd] JLLWrappers v1.7.1
  [682c06a0] JSON v0.21.4
  [9c8b4983] LightXML v0.9.2
  [d3d80556] LineSearches v7.4.0
  [2ab3a3ac] LogExpFunctions v0.3.29
  [da04e1cc] MPI v0.20.23
  [3da0fdf6] MPIPreferences v0.1.11
  [1914dd2f] MacroTools v0.5.16
  [d41bc354] NLSolversBase v7.10.0
  [2774e3e8] NLsolve v4.5.1
  [77ba4419] NaNMath v1.1.3
  [b8a86587] NearestNeighbors v0.4.22
  [6fe1bfb0] OffsetArrays v1.17.0
  [bac558e1] OrderedCollections v1.8.1
  [d96e819e] Parameters v0.12.3
  [69de0a69] Parsers v2.8.3
⌅ [5a9dfac6] PartitionedArrays v0.3.5
  [eebad327] PkgVersion v0.3.3
  [c74db56a] PolynomialBases v0.4.25
  [aea7be01] PrecompileTools v1.3.3
  [21216c6a] Preferences v1.5.0
  [1fd47b50] QuadGK v2.11.2
  [3cdcf5f2] RecipesBase v1.3.4
  [189a3867] Reexport v1.2.2
  [ae029012] Requires v1.3.1
  [7e506255] ScopedValues v1.5.0
  [efcf1570] Setfield v1.1.2
  [ce78b400] SimpleUnPack v1.1.0
  [a0a7dd2c] SparseMatricesCSR v0.6.9
  [276daf66] SpecialFunctions v2.5.1
  [90137ffa] StaticArrays v1.9.15
  [1e83bf80] StaticArraysCore v1.4.3
  [10745b16] Statistics v1.11.1
  [82ae8749] StatsAPI v1.7.1
  [3bb67fe8] TranscodingStreams v0.11.3
  [3a884ed6] UnPack v1.0.2
  [4004b06d] VTKBase v1.0.1
  [64499a7a] WriteVTK v1.21.2
  [f5851436] FFTW_jll v3.3.11+0
⌃ [e33a78d0] Hwloc_jll v2.12.1+0
  [1d5cc7b8] IntelOpenMP_jll v2025.2.0+0
  [94ce4f54] Libiconv_jll v1.18.0+0
  [856f044c] MKL_jll v2025.2.0+0
  [7cb0a576] MPICH_jll v4.3.1+0
  [f1f71cc9] MPItrampoline_jll v5.5.4+0
  [9237b28f] MicrosoftMPI_jll v10.1.4+3
  [656ef2d0] OpenBLAS32_jll v0.3.29+0
  [fe0851c0] OpenMPI_jll v5.0.8+0
  [efe28fd5] OpenSpecFun_jll v0.5.6+0
  [8fa3689e] PETSc_jll v3.22.0+0
  [aabda75e] SCALAPACK32_jll v2.2.1+1
  [02c8fc9c] XML2_jll v2.14.4+0
  [1317d2d5] oneTBB_jll v2022.0.0+0
  [0dad84c5] ArgTools v1.1.2
  [56f22d72] Artifacts v1.11.0
  [2a0f44e3] Base64 v1.11.0
  [ade2ca70] Dates v1.11.0
  [8ba89e20] Distributed v1.11.0
  [f43a241f] Downloads v1.7.0
  [7b1f6079] FileWatching v1.11.0
  [9fa8497b] Future v1.11.0
  [b77e0a4c] InteractiveUtils v1.11.0
  [ac6e5ff7] JuliaSyntaxHighlighting v1.12.0
  [4af54fe1] LazyArtifacts v1.11.0
  [b27032c2] LibCURL v0.6.4
  [76f85450] LibGit2 v1.11.0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.13.0
  [56ddb016] Logging v1.11.0
  [d6f4376e] Markdown v1.11.0
  [a63ad114] Mmap v1.11.0
  [ca575930] NetworkOptions v1.3.0
  [44cfe95a] Pkg v1.13.0
  [de0858da] Printf v1.11.0
  [9a3f8284] Random v1.11.0
  [ea8e919c] SHA v0.7.0
  [9e88b42a] Serialization v1.11.0
  [6462fe0b] Sockets v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [f489334b] StyledStrings v1.11.0
  [4607b0f0] SuiteSparse
  [fa267f1f] TOML v1.0.3
  [a4e569a6] Tar v1.10.0
  [8dfed614] Test v1.11.0
  [cf7118a7] UUIDs v1.11.0
  [4ec0a83e] Unicode v1.11.0
  [e66e0078] CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] LibCURL_jll v8.16.0+0
  [e37daf67] LibGit2_jll v1.9.1+0
  [29816b5a] LibSSH2_jll v1.11.3+1
  [14a3606d] MozillaCACerts_jll v2025.9.9
  [4536629a] OpenBLAS_jll v0.3.29+0
  [05823500] OpenLibm_jll v0.8.7+0
  [458c3c95] OpenSSL_jll v3.5.2+0
  [efcefdf7] PCRE2_jll v10.46.0+0
  [bea87d4a] SuiteSparse_jll v7.10.1+0
  [83775a58] Zlib_jll v1.3.1+2
  [3161d3a3] Zstd_jll v1.5.7+1
  [8e850b90] libblastrampoline_jll v5.13.1+0
  [8e850ede] nghttp2_jll v1.67.1+0
  [3f19e933] p7zip_jll v17.6.0+0
Info Packages marked with ⌃ and ⌅ have new versions available. Those with ⌃ may be upgradable, but those with ⌅ are restricted by compatibility constraints from upgrading.
Testing Running tests...
[0] PetscDetermineInitialFPTrap(): Floating point trapping is off by default 0
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType host available, initializing
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDevice host initialized, default device id 0, view FALSE, init type lazy
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType cuda not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType hip not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType sycl not available
[0] PetscInitialize_Common(): PETSc successfully started: number of processors = 1
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-F8v2ZI20.(none)
[0] PetscInitialize_Common(): Running on machine: GridapPETSc-against-F8v2ZI20
[0] PetscInitialize_Common(): BLAS: Environment number of OpenBLAS threads 1 given by OPENBLAS_NUM_THREADS
[0] PetscBLASSetNumThreads(): Setting number of threads used for OpenBLAS provided BLAS 1
[0] PetscCommDuplicate(): Duplicating a communicator 1140850689 -2080374784 max tags = 1073741823
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-F8v2ZI20.(none)
Vec Object: 1 MPI process
  type: seq
1.
2.
4.
1.
[0] PetscCommDuplicate(): Duplicating a communicator 1140850688 -2080374783 max tags = 1073741823
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-F8v2ZI20.(none)
Vec Object: 1 MPI process
  type: seq
1.
2.
4.
1.
Vec Object: 1 MPI process
  type: seq
20.
40.
4.
60.
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 10 unneeded,10 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 3
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 0)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 3 nodes of 4. Limit used: 5. Using Inode routines
Mat Object: 1 MPI process
  type: seqaij
row 0: (1, 2.)  (3, 3.)  (4, 1.)
row 1: (1, 6.)  (3, 11.)  (4, 5.)
row 2: (1, 4.)  (3, 3.)
row 3: (3, 4.)  (4, 3.)
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 4; storage space: 0 unneeded,10 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 3
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 0)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 4 nodes out of 4 rows. Not using Inode routines
Mat Object: 1 MPI process
  type: seqaij
row 0: (0, 4.)  (1, -2.)
row 1: (0, -1.)  (1, 6.)  (2, -2.)
row 2: (1, -1.)  (2, 6.)  (3, -2.)
row 3: (2, -1.)  (3, 4.)
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] KSPConvergedDefault(): Linear solver has converged. Residual norm 1.249022930744e-16 is less than relative tolerance 1.000000000000e-05 times initial right-hand side norm 1.145643923739e+00 at iteration 4
[0] KSPConvergedDefault(): Linear solver has converged. Residual norm 1.249022930744e-16 is less than relative tolerance 1.000000000000e-05 times initial right-hand side norm 1.145643923739e+00 at iteration 4
[0] KSPConvergedDefault(): Linear solver has converged. Residual norm 1.291406315399e-16 is less than relative tolerance 1.000000000000e-05 times initial right-hand side norm 1.145643923739e+00 at iteration 4
KSP Object: (p_) 1 MPI process
  type: gmres
    restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement
    happy breakdown tolerance 1e-30
  maximum iterations=10000, initial guess is zero
  tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
  left preconditioning
  using PRECONDITIONED norm type for convergence test
PC Object: (p_) 1 MPI process
  type: jacobi
    type DIAGONAL
  linear system matrix = precond matrix:
  Mat Object: 1 MPI process
    type: seqaij
    rows=4, cols=4
    total: nonzeros=10, allocated nonzeros=0
    total number of mallocs used during MatSetValues calls=0
      not using I-node routines
[0] PetscFinalize(): PetscFinalize() called
[0] Petsc_OuterComm_Attr_DeleteFn(): Removing reference to PETSc communicator embedded in a user MPI_Comm -2080374783
[0] Petsc_InnerComm_Attr_DeleteFn(): User MPI_Comm 1140850688 is being unlinked from inner PETSc comm -2080374783
[0] PetscCommDestroy(): Deleting PETSc MPI_Comm -2080374783
[0] Petsc_Counter_Attr_DeleteFn(): Deleting counter data in an MPI_Comm -2080374783
[0] Petsc_OuterComm_Attr_DeleteFn(): Removing reference to PETSc communicator embedded in a user MPI_Comm -2080374784
[0] Petsc_InnerComm_Attr_DeleteFn(): User MPI_Comm 1140850689 is being unlinked from inner PETSc comm -2080374784
[0] PetscCommDestroy(): Deleting PETSc MPI_Comm -2080374784
[0] Petsc_Counter_Attr_DeleteFn(): Deleting counter data in an MPI_Comm -2080374784
 18.745252 seconds (5.30 M allocations: 305.576 MiB, 5.27% gc time, 41.78% compilation time: 12% of which was recompilation)
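[Editor's note] The test block above assembled a 4x4 seqaij matrix and solved it with GMRES preconditioned by point Jacobi (the `(p_)` KSP/PC views). A minimal sketch of how such a solve is driven from Julia, assuming GridapPETSc's documented `with`/`PETScLinearSolver` API and the `symbolic_setup`/`numerical_setup`/`solve!` protocol from Gridap.Algebra; the matrix mirrors the 4x4 Mat view printed above, while the right-hand side is illustrative:

    using SparseArrays
    using Gridap.Algebra
    using GridapPETSc

    # PETSc options matching the KSP/PC views in this log
    options = "-ksp_type gmres -pc_type jacobi -ksp_rtol 1.0e-5"

    GridapPETSc.with(args = split(options)) do
      # The 4x4 matrix from the Mat view above (10 stored nonzeros)
      A = sparse([1,1,2,2,2,3,3,3,4,4],
                 [1,2,1,2,3,2,3,4,3,4],
                 [4.0,-2.0,-1.0,6.0,-2.0,-1.0,6.0,-2.0,-1.0,4.0])
      b = ones(4)               # illustrative right-hand side
      ls = PETScLinearSolver()  # KSP configured from the options string
      ss = symbolic_setup(ls, A)
      ns = numerical_setup(ss, A)
      x = zeros(4)
      solve!(x, ns, b)          # one KSPSolve, as monitored above
    end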
[0] PetscDetermineInitialFPTrap(): Floating point trapping is off by default 0
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType host available, initializing
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDevice host initialized, default device id 0, view FALSE, init type lazy
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType cuda not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType hip not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType sycl not available
[0] PetscInitialize_Common(): PETSc successfully started: number of processors = 1
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-F8v2ZI20.(none)
[0] PetscInitialize_Common(): Running on machine: GridapPETSc-against-F8v2ZI20
[0] PetscInitialize_Common(): BLAS: Environment number of OpenBLAS threads 1 given by OPENBLAS_NUM_THREADS
[0] PetscBLASSetNumThreads(): Setting number of threads used for OpenBLAS provided BLAS 1
[0] PetscCommDuplicate(): Duplicating a communicator 1140850689 -2080374784 max tags = 1073741823
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 20 unneeded,0 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 0
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 4)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 1 nodes of 4. Limit used: 5. Using Inode routines
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 14 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 1
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 5; storage space: 14 unneeded,2 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 1
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 2)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 4 nodes out of 4 rows. Not using Inode routines
4×5 GridapPETSc.PETScMatrix:
 0.0  0.0  5.0  0.0  0.0
 0.0  0.0  0.0  0.0  0.0
 0.0  0.0  0.0  0.0  7.0
 0.0  0.0  0.0  0.0  0.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 3 X 2; storage space: 6 unneeded,0 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 0
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 3) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 1 nodes of 3. Limit used: 5. Using Inode routines
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 4; storage space: 0 unneeded,10 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 3
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 0)/(num_localrows 4) < 0.6. Do not use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 4 nodes out of 4 rows. Not using Inode routines
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] Petsc_OuterComm_Attr_DeleteFn(): Removing reference to PETSc communicator embedded in a user MPI_Comm -2080374784
[0] Petsc_InnerComm_Attr_DeleteFn(): User MPI_Comm 1140850689 is being unlinked from inner PETSc comm -2080374784
[0] PetscCommDestroy(): Deleting PETSc MPI_Comm -2080374784
[0] Petsc_Counter_Attr_DeleteFn(): Deleting counter data in an MPI_Comm -2080374784
[0] PetscFinalize(): PetscFinalize() called
  8.477468 seconds (2.82 M allocations: 157.427 MiB, 7.19% gc time, 92.54% compilation time)
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
KSP Object: 1 MPI process
  type: gmres
    restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement
    happy breakdown tolerance 1e-30
  maximum iterations=10000, initial guess is zero
  tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
  left preconditioning
  using DEFAULT norm type for convergence test
PC Object: 1 MPI process
  type: jacobi
  PC has not been set up so information may be incomplete
    type DIAGONAL
  linear system matrix = precond matrix:
  Mat Object: 1 MPI process
    type: seqaij
    rows=4, cols=4
    total: nonzeros=10, allocated nonzeros=0
    total number of mallocs used during MatSetValues calls=0
      not using I-node routines
┌ Warning: 1 objects still not finalized before calling GridapPETSc.Finalize()
└ @ GridapPETSc ~/.julia/packages/GridapPETSc/l6eIU/src/Environment.jl:45
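[Editor's note] The warning above fires when PETSc-backed Julia objects are still alive at the time GridapPETSc.Finalize() runs. A sketch of the scoped pattern that avoids it, assuming the `with` do-block API; the `PETScVector(4)` constructor call is hypothetical, for illustration only:

    using GridapPETSc

    GridapPETSc.with() do
      v = PETScVector(4)  # hypothetical: some PETSc-backed object
      # ... work with v ...
      v = nothing         # drop the last Julia reference
      GC.gc()             # run finalizers so the underlying PETSc Vec is destroyed
    end                   # GridapPETSc.Finalize() runs here, with no live objects left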
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
  0 KSP Residual norm 2.000000000000e+00
  1 KSP Residual norm 4.002966042487e-16
 10.095150 seconds (4.79 M allocations: 267.965 MiB, 10.77% gc time, 89.25% compilation time: 17% of which was recompilation)
  0 SNES Function norm 3.605551275464e+00
  1 SNES Function norm 4.444444444444e-01
  2 SNES Function norm 7.111111111111e-02
  3 SNES Function norm 3.936947327951e-03
  4 SNES Function norm 1.525925473445e-05
  5 SNES Function norm 2.328306437081e-10
  6 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 6
  0 SNES Function norm 0.000000000000e+00
  1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
  0 SNES Function norm 0.000000000000e+00
  1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
  0 SNES Function norm 1.486606874732e-01
  1 SNES Function norm 8.402777777778e-03
  2 SNES Function norm 6.831067663990e-05
  3 SNES Function norm 4.665073682466e-09
  4 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 4
  0 SNES Function norm 0.000000000000e+00
  1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
  0 SNES Function norm 0.000000000000e+00
  1 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 1
  0 SNES Function norm 3.605551275464e+00
  1 SNES Function norm 4.444444444444e-01
  2 SNES Function norm 7.111111111111e-02
  3 SNES Function norm 3.936947327951e-03
  4 SNES Function norm 1.525925473445e-05
  5 SNES Function norm 2.328306437081e-10
  6 SNES Function norm 0.000000000000e+00
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 6
  2.664646 seconds (1.40 M allocations: 76.659 MiB, 18.23% gc time, 81.17% compilation time)
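[Editor's note] The SNES monitor above shows Newton iterations on small nonlinear test problems. A sketch of how a SNES-backed nonlinear FE solve is typically set up, loosely following the package's documented examples; the mesh, spaces, and residual below are a toy stand-in, not the test suite's actual problem:

    using Gridap
    using GridapPETSc

    options = "-snes_type newtonls -snes_monitor -snes_converged_reason"

    GridapPETSc.with(args = split(options)) do
      model = CartesianDiscreteModel((0, 1, 0, 1), (4, 4))
      reffe = ReferenceFE(lagrangian, Float64, 1)
      V = TestFESpace(model, reffe; dirichlet_tags = "boundary")
      U = TrialFESpace(V, 0.0)
      Ω = Triangulation(model)
      dΩ = Measure(Ω, 2)
      res(u, v) = ∫( ∇(v) ⋅ ∇(u) + v * u * u - v )*dΩ  # toy nonlinear residual
      op = FEOperator(res, U, V)      # Jacobian computed by Gridap's AD
      nls = PETScNonlinearSolver()    # SNES, configured from the options string
      uh = solve(FESolver(nls), op)   # drives SNESSolve, as monitored above
    end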
[0] PetscDetermineInitialFPTrap(): Floating point trapping is off by default 0
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType host available, initializing
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDevice host initialized, default device id 0, view FALSE, init type lazy
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType cuda not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType hip not available
[0] PetscDeviceInitializeTypeFromOptions_Private(): PetscDeviceType sycl not available
[0] PetscInitialize_Common(): PETSc successfully started: number of processors = 1
[0] PetscGetHostName(): Rejecting domainname, likely is NIS GridapPETSc-against-F8v2ZI20.(none)
[0] PetscInitialize_Common(): Running on machine: GridapPETSc-against-F8v2ZI20
[0] PetscInitialize_Common(): BLAS: Environment number of OpenBLAS threads 1 given by OPENBLAS_NUM_THREADS
[0] PetscBLASSetNumThreads(): Setting number of threads used for OpenBLAS provided BLAS 1
[0] PetscCommDuplicate(): Duplicating a communicator 1140850689 -2080374784 max tags = 1073741823
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 3; storage space: 2 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
4×3 GridapPETSc.PETScMatrix:
 -4.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 3; storage space: 11 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
4×3 GridapPETSc.PETScMatrix:
 -4.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] MatAssemblyEnd_SeqAIJ(): Matrix size: 4 X 3; storage space: 0 unneeded,1 used
[0] MatAssemblyEnd_SeqAIJ(): Number of mallocs during MatSetValues() is 0
[0] MatAssemblyEnd_SeqAIJ(): Maximum nonzeros in any row is 1
[0] MatCheckCompressedRow(): Found the ratio (num_zerorows 3)/(num_localrows 4) > 0.6. Use CompressedRow routines.
[0] MatSeqAIJCheckInode(): Found 2 nodes of 4. Limit used: 5. Using Inode routines
4×3 GridapPETSc.PETScMatrix:
 -2.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
  0.0  0.0  0.0
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] MatConvert(): Calling duplicate for initial matrix seqaij 0 1
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
4-element GridapPETSc.PETScVector:
 6.0
 0.0
 0.0
 1.0
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
[0] PetscCommDuplicate(): Using internal PETSc communicator 1140850689 -2080374784
┌ Warning: 1 objects still not finalized before calling GridapPETSc.Finalize()
└ @ GridapPETSc ~/.julia/packages/GridapPETSc/l6eIU/src/Environment.jl:45
[0] PetscFinalize(): PetscFinalize() called
  7.495727 seconds (2.43 M allocations: 135.826 MiB, 7.04% gc time, 92.48% compilation time)
  0 KSP Residual norm 0.0467654
  1 KSP Residual norm 0.015417
  2 KSP Residual norm 0.00327706
  3 KSP Residual norm 0.000661172
  4 KSP Residual norm 0.000252898
  5 KSP Residual norm 0.00014497
  6 KSP Residual norm 7.44567e-05
  7 KSP Residual norm 5.31437e-05
  8 KSP Residual norm 3.10604e-05
  9 KSP Residual norm 1.88474e-05
 10 KSP Residual norm 1.67308e-05
 11 KSP Residual norm 1.30481e-05
 12 KSP Residual norm 8.92208e-06
 13 KSP Residual norm 6.2216e-06
 14 KSP Residual norm 5.78002e-06
 15 KSP Residual norm 4.40273e-06
 16 KSP Residual norm 4.00261e-06
 17 KSP Residual norm 2.70432e-06
 18 KSP Residual norm 2.34576e-06
 19 KSP Residual norm 1.94042e-06
 20 KSP Residual norm 1.75753e-06
 21 KSP Residual norm 1.73547e-06
 22 KSP Residual norm 1.62163e-06
 23 KSP Residual norm 1.25495e-06
 24 KSP Residual norm 1.01893e-06
 25 KSP Residual norm 8.34647e-07
 26 KSP Residual norm 6.48088e-07
 27 KSP Residual norm 5.14953e-07
 28 KSP Residual norm 4.21736e-07
 29 KSP Residual norm 3.82565e-07
 30 KSP Residual norm 3.11746e-07
 31 KSP Residual norm 2.49033e-07
 32 KSP Residual norm 2.33299e-07
 33 KSP Residual norm 2.07937e-07
 34 KSP Residual norm 1.80063e-07
 35 KSP Residual norm 1.38409e-07
 36 KSP Residual norm 1.20657e-07
 37 KSP Residual norm 9.80194e-08
 38 KSP Residual norm 7.25573e-08
 39 KSP Residual norm 4.91734e-08
 40 KSP Residual norm 3.75952e-08
 41 KSP Residual norm 2.78836e-08
 42 KSP Residual norm 2.23736e-08
 43 KSP Residual norm 1.57428e-08
 44 KSP Residual norm 1.24789e-08
 45 KSP Residual norm 9.28278e-09
 46 KSP Residual norm 6.0542e-09
 47 KSP Residual norm 4.92096e-09
 48 KSP Residual norm 3.61052e-09
 49 KSP Residual norm 3.1137e-09
 50 KSP Residual norm 1.99343e-09
 51 KSP Residual norm 1.768e-09
 52 KSP Residual norm 1.32989e-09
 53 KSP Residual norm 9.447e-10
 54 KSP Residual norm 6.254e-10
 55 KSP Residual norm 4.207e-10
Linear solve converged due to CONVERGED_RTOL iterations 55
WARNING! There are options you set that were not used!
WARNING! could be spelling mistake, etc!
There is one unused database option. It is:
Option left: name:-mg_levels_esteig_ksp_type value: cg source: command line
171.183142 seconds (148.26 M allocations: 7.823 GiB, 3.29% gc time, 99.34% compilation time)
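[Editor's note] The unused-option report above comes from PETSc's options database: every flag passed at initialization is tracked, and any option that no object ever queried is listed when PETSc finalizes (here -mg_levels_esteig_ksp_type, presumably because the eigenvalue-estimation code path that would read it was never exercised). A sketch of how such an option string reaches PETSc through GridapPETSc, assuming the `with(args = ...)` API; the flags mirror the CG + GAMG configuration shown in the KSP views of this log:

    using GridapPETSc

    # Standard PETSc flags; anything left unconsumed triggers the WARNING block above
    options = "-ksp_type cg -ksp_rtol 1.0e-10 -ksp_monitor -ksp_converged_reason " *
              "-pc_type gamg -mg_levels_esteig_ksp_type cg"

    GridapPETSc.with(args = split(options)) do
      # assemble and solve with PETScLinearSolver() as in the earlier sketch;
      # the KSP and PC read these options when they are set up
    end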
  0 KSP Residual norm 1.000000000000e+01
  1 KSP Residual norm 1.167900528050e-13
Linear solve converged due to CONVERGED_ITS iterations 1
208.283938 seconds (100.96 M allocations: 5.451 GiB, 1.38% gc time, 98.50% compilation time)
  0 SNES Function norm 2.409822369424e+06
Linear solve converged due to CONVERGED_RTOL iterations 54
  1 SNES Function norm 8.075649416004e+00
Linear solve converged due to CONVERGED_RTOL iterations 39
  2 SNES Function norm 4.662170753064e-01
Linear solve converged due to CONVERGED_RTOL iterations 37
  3 SNES Function norm 8.399998666394e-03
Linear solve converged due to CONVERGED_RTOL iterations 37
  4 SNES Function norm 4.665302403823e-06
Linear solve converged due to CONVERGED_RTOL iterations 44
  5 SNES Function norm 1.574127275597e-11
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5
  0 SNES Function norm 2.409822369424e+06
Linear solve converged due to CONVERGED_RTOL iterations 54
  1 SNES Function norm 8.075649416004e+00
Linear solve converged due to CONVERGED_RTOL iterations 39
  2 SNES Function norm 4.662170753064e-01
Linear solve converged due to CONVERGED_RTOL iterations 37
  3 SNES Function norm 8.399998666394e-03
Linear solve converged due to CONVERGED_RTOL iterations 37
  4 SNES Function norm 4.665302403823e-06
Linear solve converged due to CONVERGED_RTOL iterations 44
  5 SNES Function norm 1.574127275597e-11
Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5
 37.536104 seconds (17.31 M allocations: 960.415 MiB, 3.83% gc time, 94.62% compilation time: <1% of which was recompilation)
  0 KSP Residual norm 8.550000000000e-03
  1 KSP Residual norm 8.968352924103e-04
  2 KSP Residual norm 9.188071216687e-05
  3 KSP Residual norm 7.483127648168e-06
  4 KSP Residual norm 6.216337456339e-07
  5 KSP Residual norm 5.262981415328e-08
  6 KSP Residual norm 4.447870837457e-09
  7 KSP Residual norm 3.660520650081e-10
  8 KSP Residual norm 3.000628543014e-11
  9 KSP Residual norm 2.545610900010e-12
 10 KSP Residual norm 2.084997771778e-13
Linear solve converged due to CONVERGED_RTOL iterations 10
KSP Object: 1 MPI process
  type: cg
  maximum iterations=1000, initial guess is zero
  tolerances: relative=1e-10, absolute=1e-50, divergence=10000.
  left preconditioning
  using UNPRECONDITIONED norm type for convergence test
PC Object: 1 MPI process
  type: gamg
    type is MULTIPLICATIVE, levels=3 cycles=v
      Cycles per PCApply=1
      Using externally compute Galerkin coarse grid matrices
      GAMG specific options
        Threshold for dropping small values in graph on each level = -1. -1. -1.
        Threshold scaling factor for each level not specified = 1.
        AGG specific options
          Number of levels of aggressive coarsening 0
          MatCoarsen Object: (pc_gamg_) 1 MPI process
            type: misk
          Number smoothing steps to construct prolongation 1
        Complexity: grid = 1.11385 operator = 1.15878
        Per-level complexity: op = operator, int = interpolation
          #equations | #active PEs | avg nnz/row op | avg nnz/row int
                   7 |           1 |              7 |               0
                  76 |           1 |             32 |               4
                 729 |           1 |             22 |               6
  Coarse grid solver -- level 0 -------------------------------
    KSP Object: (mg_coarse_) 1 MPI process
      type: preonly
      maximum iterations=10000, initial guess is zero
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_coarse_) 1 MPI process
      type: bjacobi
        number of blocks = 1
        Local solver information for first block is in the following KSP and PC objects on rank 0:
        Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks
      KSP Object: (mg_coarse_sub_) 1 MPI process
        type: preonly
        maximum iterations=1, initial guess is zero
        tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
        left preconditioning
        using NONE norm type for convergence test
      PC Object: (mg_coarse_sub_) 1 MPI process
        type: cholesky
          out-of-place factorization
          tolerance for zero pivot 2.22045e-14
          matrix ordering: nd
          factor fill ratio given 5., needed 1.
            Factored matrix follows:
              Mat Object: (mg_coarse_sub_) 1 MPI process
                type: seqsbaij
                rows=7, cols=7
                package used to perform factorization: petsc
                total: nonzeros=28, allocated nonzeros=28
                    block size is 1
        linear system matrix = precond matrix:
        Mat Object: (mg_coarse_sub_) 1 MPI process
          type: seqaij
          rows=7, cols=7
          total: nonzeros=49, allocated nonzeros=49
          total number of mallocs used during MatSetValues calls=0
            using I-node routines: found 2 nodes, limit used is 5
      linear system matrix = precond matrix:
      Mat Object: (mg_coarse_sub_) 1 MPI process
        type: seqaij
        rows=7, cols=7
        total: nonzeros=49, allocated nonzeros=49
        total number of mallocs used during MatSetValues calls=0
          using I-node routines: found 2 nodes, limit used is 5
  Down solver (pre-smoother) on level 1 -------------------------------
    KSP Object: (mg_levels_1_) 1 MPI process
      type: chebyshev
        Chebyshev polynomial of first kind
        eigenvalue targets used: min 0.255929, max 2.81522
        eigenvalues provided (min 0.325601, max 2.55929) with transform: [0. 0.1; 0. 1.1]
      maximum iterations=2, nonzero initial guess
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_levels_1_) 1 MPI process
      type: jacobi
        type DIAGONAL
      linear system matrix = precond matrix:
      Mat Object: 1 MPI process
        type: seqaij
        rows=76, cols=76
        total: nonzeros=2432, allocated nonzeros=2432
        total number of mallocs used during MatSetValues calls=0
          not using I-node routines
  Up solver (post-smoother) same as down solver (pre-smoother)
  Down solver (pre-smoother) on level 2 -------------------------------
    KSP Object: (mg_levels_2_) 1 MPI process
      type: chebyshev
        Chebyshev polynomial of first kind
        eigenvalue targets used: min 0.143003, max 1.57304
        eigenvalues provided (min 0.109061, max 1.43003) with transform: [0. 0.1; 0. 1.1]
      maximum iterations=2, nonzero initial guess
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_levels_2_) 1 MPI process
      type: jacobi
        type DIAGONAL
      linear system matrix = precond matrix:
      Mat Object: 1 MPI process
        type: seqaij
        rows=729, cols=729
        total: nonzeros=15625, allocated nonzeros=0
        total number of mallocs used during MatSetValues calls=0
          not using I-node routines
  Up solver (post-smoother) same as down solver (pre-smoother)
  linear system matrix = precond matrix:
  Mat Object: 1 MPI process
    type: seqaij
    rows=729, cols=729
    total: nonzeros=15625, allocated nonzeros=0
    total number of mallocs used during MatSetValues calls=0
      not using I-node routines
  0 KSP Residual norm 8.550000000000e-03
  1 KSP Residual norm 8.968352924103e-04
  2 KSP Residual norm 9.188071216687e-05
  3 KSP Residual norm 7.483127648168e-06
  4 KSP Residual norm 6.216337456339e-07
  5 KSP Residual norm 5.262981415328e-08
  6 KSP Residual norm 4.447870837457e-09
  7 KSP Residual norm 3.660520650081e-10
  8 KSP Residual norm 3.000628543014e-11
  9 KSP Residual norm 2.545610900010e-12
 10 KSP Residual norm 2.084997771778e-13
Linear solve converged due to CONVERGED_RTOL iterations 10
KSP Object: 1 MPI process
  type: cg
  maximum iterations=1000, initial guess is zero
  tolerances: relative=1e-10, absolute=1e-50, divergence=10000.
  left preconditioning
  using UNPRECONDITIONED norm type for convergence test
PC Object: 1 MPI process
  type: gamg
    type is MULTIPLICATIVE, levels=3 cycles=v
      Cycles per PCApply=1
      Using externally compute Galerkin coarse grid matrices
      GAMG specific options
        Threshold for dropping small values in graph on each level = -1. -1. -1.
        Threshold scaling factor for each level not specified = 1.
        AGG specific options
          Number of levels of aggressive coarsening 0
          MatCoarsen Object: (pc_gamg_) 1 MPI process
            type: misk
          Number smoothing steps to construct prolongation 1
        Complexity: grid = 1.11385 operator = 1.15878
        Per-level complexity: op = operator, int = interpolation
          #equations | #active PEs | avg nnz/row op | avg nnz/row int
                   7 |           1 |              7 |               0
                  76 |           1 |             32 |               4
                 729 |           1 |             22 |               6
  Coarse grid solver -- level 0 -------------------------------
    KSP Object: (mg_coarse_) 1 MPI process
      type: preonly
      maximum iterations=10000, initial guess is zero
      tolerances: relative=1e-05, absolute=1e-50, divergence=10000.
      left preconditioning
      using NONE norm type for convergence test
    PC Object: (mg_coarse_) 1 MPI process
      type: bjacobi
        number of blocks = 1
[0]PETSC ERROR: 
[0]PETSC ERROR: 
[0]PETSC ERROR: 
Testing failed after 499.53s
ERROR: LoadError: Package GridapPETSc errored during testing (received signal: 11)
Stacktrace:
  [1] pkgerror(msg::String)
    @ Pkg.Types /opt/julia/share/julia/stdlib/v1.13/Pkg/src/Types.jl:68
  [2] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, julia_args::Cmd, test_args::Cmd, test_fn::Nothing, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool)
    @ Pkg.Operations /opt/julia/share/julia/stdlib/v1.13/Pkg/src/Operations.jl:2673
  [3] test
    @ /opt/julia/share/julia/stdlib/v1.13/Pkg/src/Operations.jl:2522 [inlined]
  [4] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, test_fn::Nothing, julia_args::Cmd, test_args::Cmd, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool, kwargs::@Kwargs{io::IOContext{IO}})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:538
  [5] kwcall(::@NamedTuple{julia_args::Cmd, io::IOContext{IO}}, ::typeof(Pkg.API.test), ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:515
  [6] test(pkgs::Vector{PackageSpec}; io::IOContext{IO}, kwargs::@Kwargs{julia_args::Cmd})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:168
  [7] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkgs::Vector{PackageSpec})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:157
  [8] test(pkgs::Vector{String}; kwargs::@Kwargs{julia_args::Cmd})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:156
  [9] test
    @ /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:156 [inlined]
 [10] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkg::String)
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:155
 [11] top-level scope
    @ /PkgEval.jl/scripts/evaluate.jl:219
 [12] include(mod::Module, _path::String)
    @ Base ./Base.jl:309
 [13] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:330
 [14] _start()
    @ Base ./client.jl:563
in expression starting at /PkgEval.jl/scripts/evaluate.jl:210

PkgEval crashed after 1074.28s: a segmentation fault happened