Package evaluation to test NDTensors on Julia 1.14.0-DEV.1354 (1806b0bc31*) started at 2025-12-11T19:45:58.214

################################################################################
# Set-up
#

Installing PkgEval dependencies (TestEnv)...
Activating project at `~/.julia/environments/v1.14`
Set-up completed after 8.56s

################################################################################
# Installation
#

Installing NDTensors...
Resolving package versions...
Updating `~/.julia/environments/v1.14/Project.toml`
  [23ae76d9] + NDTensors v0.4.16
Updating `~/.julia/environments/v1.14/Manifest.toml`
  [7d9f7c33] + Accessors v0.1.43
  [79e6a3ab] + Adapt v4.4.0
  [dce04be8] + ArgCheck v2.5.0
  [4fba245c] + ArrayInterface v7.22.0
  [4c555306] + ArrayLayouts v1.12.2
  [198e06fe] + BangBang v0.4.6
  [9718e550] + Baselet v0.1.1
  [8e7c35d0] + BlockArrays v1.9.3
  [f70d9fcc] + CommonWorldInvalidations v1.0.0
  [34da2185] + Compat v4.18.1
  [a33af91c] + CompositionsBase v0.1.2
  [187b0558] + ConstructionBase v1.6.0
  [9a962f9c] + DataAPI v1.16.0
  [e2d170a0] + DataValueInterfaces v1.0.0
  [244e2a9f] + DefineSingletons v0.1.2
  [85a47980] + Dictionaries v0.4.5
  [da5c29d0] + EllipsisNotation v1.8.0
  [e2ba6199] + ExprTools v0.1.10
  [e189563c] + ExternalDocstrings v0.1.1
  [1a297f60] + FillArrays v1.15.0
  [41a02a25] + Folds v0.2.10
  [d9f16b24] + Functors v0.5.2
  [f0d1745a] + HalfIntegers v1.6.0
  [615f187c] + IfElse v0.1.1
  [313cdc1a] + Indexing v1.1.1
  [22cec73e] + InitialValues v0.3.1
  [842dd82b] + InlineStrings v1.4.5
  [3587e190] + InverseFunctions v0.1.17
  [82899510] + IteratorInterfaceExtensions v1.0.0
  [1914dd2f] + MacroTools v0.5.16
  [128add7d] + MicroCollections v0.2.0
  [23ae76d9] + NDTensors v0.4.16
  [bac558e1] + OrderedCollections v1.8.1
  [65ce6f38] + PackageExtensionCompat v1.0.2
  [aea7be01] + PrecompileTools v1.3.3
  [21216c6a] + Preferences v1.5.0
  [42d2dcc6] + Referenceables v0.1.3
  [ae029012] + Requires v1.3.1
  [431bcebd] + SciMLPublic v1.0.0
  [efcf1570] + Setfield v1.1.2
  [699a6c99] + SimpleTraits v0.9.5
  [03a91e81] + SplitApplyCombine v1.2.3
  [171d559e] + SplittablesBase v0.1.15
  [aedffcd0] + Static v1.3.1
  [0d7ed370] + StaticArrayInterface v1.8.0
  [90137ffa] + StaticArrays v1.9.15
  [1e83bf80] + StaticArraysCore v1.4.4
  [5e0ebb24] + Strided v2.3.2
  [4db3bf67] + StridedViews v0.4.1
  [3783bdb8] + TableTraits v1.0.1
  [bd369af6] + Tables v1.12.1
  [24d252fe] + ThreadedScans v0.1.0
  [a759f4b9] + TimerOutputs v0.5.29
  [28d57a85] + Transducers v0.4.85
  [9d95972d] + TupleTools v1.6.0
  [409d34a3] + VectorInterface v0.5.0
  [56f22d72] + Artifacts v1.11.0
  [2a0f44e3] + Base64 v1.11.0
  [ade2ca70] + Dates v1.11.0
  [8ba89e20] + Distributed v1.11.0
  [9fa8497b] + Future v1.11.0
  [b77e0a4c] + InteractiveUtils v1.11.0
  [ac6e5ff7] + JuliaSyntaxHighlighting v1.13.0
  [8f399da3] + Libdl v1.11.0
  [37e2e46d] + LinearAlgebra v1.13.0
  [56ddb016] + Logging v1.11.0
  [d6f4376e] + Markdown v1.11.0
  [de0858da] + Printf v1.11.0
  [9a3f8284] + Random v1.11.0
  [ea8e919c] + SHA v1.0.0
  [9e88b42a] + Serialization v1.11.0
  [6462fe0b] + Sockets v1.11.0
  [2f01184e] + SparseArrays v1.13.0
  [f489334b] + StyledStrings v1.13.0
  [fa267f1f] + TOML v1.0.3
  [8dfed614] + Test v1.11.0
  [cf7118a7] + UUIDs v1.11.0
  [4ec0a83e] + Unicode v1.11.0
  [e66e0078] + CompilerSupportLibraries_jll v1.3.0+1
  [4536629a] + OpenBLAS_jll v0.3.29+0
  [bea87d4a] + SuiteSparse_jll v7.10.1+0
  [8e850b90] + libblastrampoline_jll v5.15.0+0
Installation completed after 3.5s
################################################################################
# Precompilation
#

ERROR: LoadError: MethodError: no method matching setindex!(::Base.ScopedValues.ScopedValue{IO}, ::Nothing)
The function `setindex!` exists, but no method is defined for this combination of argument types.
Stacktrace:
 [1] top-level scope
   @ /PkgEval.jl/scripts/precompile.jl:10
 [2] include(mod::Module, _path::String)
   @ Base ./Base.jl:309
 [3] exec_options(opts::Base.JLOptions)
   @ Base ./client.jl:344
 [4] _start()
   @ Base ./client.jl:577
in expression starting at /PkgEval.jl/scripts/precompile.jl:6
caused by: MethodError: no method matching setindex!(::Base.ScopedValues.ScopedValue{IO}, ::Base.DevNull)
The function `setindex!` exists, but no method is defined for this combination of argument types.
Stacktrace:
 [1] top-level scope
   @ /PkgEval.jl/scripts/precompile.jl:7
 [2] include(mod::Module, _path::String)
   @ Base ./Base.jl:309
 [3] exec_options(opts::Base.JLOptions)
   @ Base ./client.jl:344
 [4] _start()
   @ Base ./client.jl:577
Precompilation failed after 11.17s
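The failure originates in PkgEval's own precompile.jl, which evidently assigns `devnull` (and later `nothing`) to a `Base.ScopedValues.ScopedValue{IO}` via `setindex!`. A `ScopedValue` binding cannot be mutated in place; it can only be shadowed for the duration of a dynamic scope. A minimal sketch of the difference, using a hypothetical `log_io` value rather than the script's actual variable:

    using Base.ScopedValues   # ScopedValue, with, @with (Julia 1.11+)

    const log_io = ScopedValue{IO}(stdout)   # hypothetical scoped IO setting

    # A ScopedValue has no setindex! method, so plain assignment throws
    # exactly the MethodError shown above:
    #     log_io[] = devnull

    # Instead, a new value is bound for the extent of a dynamic scope:
    with(log_io => devnull) do
        @assert log_io[] === devnull   # visible inside the scope...
    end
    @assert log_io[] === stdout        # ...and restored afterwards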
################################################################################
# Testing
#

Testing NDTensors
Status `/tmp/jl_7xoGrP/Project.toml`
  [79e6a3ab] Adapt v4.4.0
  [4c555306] ArrayLayouts v1.12.2
  [8e7c35d0] BlockArrays v1.9.3
  [861a8166] Combinatorics v1.0.3
  [85a47980] Dictionaries v0.4.5
  [da5c29d0] EllipsisNotation v1.8.0
  [1a297f60] FillArrays v1.15.0
  [46192b85] GPUArraysCore v0.2.0
  [27aeb0d3] JLArrays v0.3.0
  [dbb5928d] MappedArrays v0.4.3
  [23ae76d9] NDTensors v0.4.16
  [6fd5a793] Octavian v0.3.29
  [1bc83da4] SafeTestsets v0.1.0
  [860ef19b] StableRNGs v1.0.4
  [4db3bf67] StridedViews v0.4.1
  [6aa20fa7] TensorOperations v5.3.1
  [e88e6eb3] Zygote v0.7.10
  [37e2e46d] LinearAlgebra v1.13.0
  [44cfe95a] Pkg v1.14.0
  [9a3f8284] Random v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [8dfed614] Test v1.11.0
Status `/tmp/jl_7xoGrP/Manifest.toml`
  [621f4979] AbstractFFTs v1.5.0
  [7d9f7c33] Accessors v0.1.43
  [79e6a3ab] Adapt v4.4.0
  [dce04be8] ArgCheck v2.5.0
  [4fba245c] ArrayInterface v7.22.0
  [4c555306] ArrayLayouts v1.12.2
  [a9b6321e] Atomix v1.1.2
  [198e06fe] BangBang v0.4.6
  [9718e550] Baselet v0.1.1
  [62783981] BitTwiddlingConvenienceFunctions v0.1.6
  [8e7c35d0] BlockArrays v1.9.3
  [fa961155] CEnum v0.5.0
  [2a0fbf3d] CPUSummary v0.2.7
  [082447d4] ChainRules v1.72.6
  [d360d2e6] ChainRulesCore v1.26.0
  [fb6a15b2] CloseOpenIntervals v0.1.13
  [861a8166] Combinatorics v1.0.3
  [bbf7d656] CommonSubexpressions v0.3.1
  [f70d9fcc] CommonWorldInvalidations v1.0.0
  [34da2185] Compat v4.18.1
  [a33af91c] CompositionsBase v0.1.2
  [187b0558] ConstructionBase v1.6.0
  [adafc99b] CpuId v0.3.1
  [9a962f9c] DataAPI v1.16.0
  [e2d170a0] DataValueInterfaces v1.0.0
  [244e2a9f] DefineSingletons v0.1.2
  [85a47980] Dictionaries v0.4.5
  [163ba53b] DiffResults v1.1.0
  [b552c78f] DiffRules v1.15.1
  [ffbed154] DocStringExtensions v0.9.5
  [da5c29d0] EllipsisNotation v1.8.0
  [e2ba6199] ExprTools v0.1.10
  [e189563c] ExternalDocstrings v0.1.1
  [1a297f60] FillArrays v1.15.0
  [41a02a25] Folds v0.2.10
  [f6369f11] ForwardDiff v1.3.0
  [d9f16b24] Functors v0.5.2
  [0c68f7d7] GPUArrays v11.3.1
  [46192b85] GPUArraysCore v0.2.0
  [f0d1745a] HalfIntegers v1.6.0
  [076d061b] HashArrayMappedTries v0.2.0
  [3e5b6fbb] HostCPUFeatures v0.1.18
  [7869d1d1] IRTools v0.4.15
  [615f187c] IfElse v0.1.1
  [313cdc1a] Indexing v1.1.1
  [22cec73e] InitialValues v0.3.1
  [842dd82b] InlineStrings v1.4.5
  [3587e190] InverseFunctions v0.1.17
  [92d709cd] IrrationalConstants v0.2.6
  [82899510] IteratorInterfaceExtensions v1.0.0
  [27aeb0d3] JLArrays v0.3.0
  [692b3bcd] JLLWrappers v1.7.1
  [63c18a36] KernelAbstractions v0.9.39
  [929cbde3] LLVM v9.4.4
  [8ac3fa9e] LRUCache v1.6.2
  [10f19ff3] LayoutPointers v0.1.17
  [2ab3a3ac] LogExpFunctions v0.3.29
  [bdcacae8] LoopVectorization v0.12.173
  [1914dd2f] MacroTools v0.5.16
  [d125e4d3] ManualMemory v0.1.8
  [dbb5928d] MappedArrays v0.4.3
  [128add7d] MicroCollections v0.2.0
  [23ae76d9] NDTensors v0.4.16
  [77ba4419] NaNMath v1.1.3
  [6fd5a793] Octavian v0.3.29
  [6fe1bfb0] OffsetArrays v1.17.0
  [bac558e1] OrderedCollections v1.8.1
  [65ce6f38] PackageExtensionCompat v1.0.2
  [1d0040c9] PolyesterWeave v0.2.2
  [aea7be01] PrecompileTools v1.3.3
  [21216c6a] Preferences v1.5.0
  [43287f4e] PtrArrays v1.3.0
  [c1ae055f] RealDot v0.1.0
  [189a3867] Reexport v1.2.2
  [42d2dcc6] Referenceables v0.1.3
  [ae029012] Requires v1.3.1
  [94e857df] SIMDTypes v0.1.0
  [476501e8] SLEEFPirates v0.6.43
  [1bc83da4] SafeTestsets v0.1.0
  [431bcebd] SciMLPublic v1.0.0
  [7e506255] ScopedValues v1.5.0
  [efcf1570] Setfield v1.1.2
  [699a6c99] SimpleTraits v0.9.5
  [dc90abb0] SparseInverseSubset v0.1.2
  [276daf66] SpecialFunctions v2.6.1
  [03a91e81] SplitApplyCombine v1.2.3
  [171d559e] SplittablesBase v0.1.15
  [860ef19b] StableRNGs v1.0.4
  [aedffcd0] Static v1.3.1
  [0d7ed370] StaticArrayInterface v1.8.0
  [90137ffa] StaticArrays v1.9.15
  [1e83bf80] StaticArraysCore v1.4.4
  [10745b16] Statistics v1.11.1
  [5e0ebb24] Strided v2.3.2
  [4db3bf67] StridedViews v0.4.1
  [09ab397b] StructArrays v0.7.2
  [3783bdb8] TableTraits v1.0.1
  [bd369af6] Tables v1.12.1
  [6aa20fa7] TensorOperations v5.3.1
  [24d252fe] ThreadedScans v0.1.0
  [8290d209] ThreadingUtilities v0.5.5
  [a759f4b9] TimerOutputs v0.5.29
  [28d57a85] Transducers v0.4.85
  [9d95972d] TupleTools v1.6.0
  [3a884ed6] UnPack v1.0.2
  [013be700] UnsafeAtomics v0.3.0
  [409d34a3] VectorInterface v0.5.0
  [3d5dd08c] VectorizationBase v0.21.72
  [e88e6eb3] Zygote v0.7.10
  [700de1a5] ZygoteRules v0.2.7
  [dad2f222] LLVMExtra_jll v0.0.38+0
  [efe28fd5] OpenSpecFun_jll v0.5.6+0
  [0dad84c5] ArgTools v1.1.2
  [56f22d72] Artifacts v1.11.0
  [2a0f44e3] Base64 v1.11.0
  [ade2ca70] Dates v1.11.0
  [8ba89e20] Distributed v1.11.0
  [f43a241f] Downloads v1.7.0
  [7b1f6079] FileWatching v1.11.0
  [9fa8497b] Future v1.11.0
  [b77e0a4c] InteractiveUtils v1.11.0
  [ac6e5ff7] JuliaSyntaxHighlighting v1.13.0
  [4af54fe1] LazyArtifacts v1.11.0
  [b27032c2] LibCURL v1.0.0
  [76f85450] LibGit2 v1.11.0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.13.0
  [56ddb016] Logging v1.11.0
  [d6f4376e] Markdown v1.11.0
  [ca575930] NetworkOptions v1.3.0
  [44cfe95a] Pkg v1.14.0
  [de0858da] Printf v1.11.0
  [9a3f8284] Random v1.11.0
  [ea8e919c] SHA v1.0.0
  [9e88b42a] Serialization v1.11.0
  [6462fe0b] Sockets v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [f489334b] StyledStrings v1.13.0
  [4607b0f0] SuiteSparse
  [fa267f1f] TOML v1.0.3
  [a4e569a6] Tar v1.10.0
  [8dfed614] Test v1.11.0
  [cf7118a7] UUIDs v1.11.0
  [4ec0a83e] Unicode v1.11.0
  [e66e0078] CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] LibCURL_jll v8.17.0+0
  [e37daf67] LibGit2_jll v1.9.1+0
  [29816b5a] LibSSH2_jll v1.11.3+1
  [14a3606d] MozillaCACerts_jll v2025.12.2
  [4536629a] OpenBLAS_jll v0.3.29+0
  [05823500] OpenLibm_jll v0.8.7+0
  [458c3c95] OpenSSL_jll v3.5.4+0
  [efcefdf7] PCRE2_jll v10.47.0+0
  [bea87d4a] SuiteSparse_jll v7.10.1+0
  [83775a58] Zlib_jll v1.3.1+2
  [3161d3a3] Zstd_jll v1.5.7+1
  [8e850b90] libblastrampoline_jll v5.15.0+0
  [8e850ede] nghttp2_jll v1.68.0+1
  [3f19e933] p7zip_jll v17.7.0+0
Testing Running tests...
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl
┌ Warning: The call to compilecache failed to create a usable precompiled cache file for JLArrays [27aeb0d3-9eb9-45fb-866b-73c2ecf80fcb]
│   exception = Required dependency Base.PkgId(Base.UUID("013be700-e6cd-48c3-b4a1-df204f14c38f"), "UnsafeAtomics") failed to load from a cache file.
└ @ Base loading.jl:2891
ERROR: LoadError: Precompiled image Base.PkgId(Base.UUID("27aeb0d3-9eb9-45fb-866b-73c2ecf80fcb"), "JLArrays") not available with flags CacheFlags(; use_pkgimages=false, debug_level=1, check_bounds=0, inline=true, opt_level=0)
Stacktrace:
  [1] error(s::String)
    @ Base ./error.jl:44
  [2] __require_prelocked(pkg::Base.PkgId, env::String)
    @ Base ./loading.jl:2813
  [3] _require_prelocked(uuidkey::Base.PkgId, env::String)
    @ Base ./loading.jl:2665
  [4] macro expansion
    @ ./loading.jl:2593 [inlined]
  [5] macro expansion
    @ ./lock.jl:376 [inlined]
  [6] __require(into::Module, mod::Symbol)
    @ Base ./loading.jl:2557
  [7] require
    @ ./loading.jl:2533 [inlined]
  [8] eval_import_path
    @ ./module.jl:36 [inlined]
  [9] eval_import_path_all(at::Module, path::Expr, keyword::String)
    @ Base ./module.jl:60
 [10] _eval_import(::Bool, ::Module, ::Expr, ::Expr, ::Vararg{Expr})
    @ Base ./module.jl:101
 [11] top-level scope
    @ ~/.julia/packages/NDTensors/wnM2t/src/vendored/TypeParameterAccessors/ext/TypeParameterAccessorsJLArraysExt.jl:3
 [12] include(mapexpr::Function, mod::Module, _path::String)
    @ Base ./Base.jl:310
 [13] top-level scope
    @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsJLArraysExt/NDTensorsJLArraysExt.jl:4
 [14] include(mod::Module, _path::String)
    @ Base ./Base.jl:309
 [15] include_package_for_output(pkg::Base.PkgId, input::String, syntax_version::VersionNumber, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing)
    @ Base ./loading.jl:3250
 [16] top-level scope
    @ stdin:5
 [17] eval(m::Module, e::Any)
    @ Core ./boot.jl:489
 [18] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String)
    @ Base ./loading.jl:3092
 [19] include_string
    @ ./loading.jl:3102 [inlined]
 [20] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:342
 [21] _start()
    @ Base ./client.jl:577
in expression starting at /home/pkgeval/.julia/packages/NDTensors/wnM2t/src/vendored/TypeParameterAccessors/ext/TypeParameterAccessorsJLArraysExt.jl:1
in expression starting at /home/pkgeval/.julia/packages/NDTensors/wnM2t/ext/NDTensorsJLArraysExt/NDTensorsJLArraysExt.jl:1
in expression starting at stdin:5
1 dependency had output during precompilation:
┌ NDTensors → NDTensorsJLArraysExt
│  [Output was shown above]
└
┌ Error: Error during loading of extension NDTensorsJLArraysExt of NDTensors, use `Base.retry_load_extensions()` to retry.
│   exception =
│    1-element ExceptionStack:
│    The following 1 package failed to precompile:
│
│    NDTensorsJLArraysExt
│    Failed to precompile NDTensorsJLArraysExt [406ae9ea-2581-5cd8-ad3f-25e2b042a2ec] to "/home/pkgeval/.julia/compiled/v1.14/NDTensorsJLArraysExt/jl_QIBO7y" (ProcessExited(1)).
│
└ @ Base loading.jl:1721
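The extension fails to load because the on-disk JLArrays cache does not match the CacheFlags of the test process (PkgEval runs tests with package images disabled, hence `use_pkgimages=false`), and rebuilding it in-process also failed when UnsafeAtomics could not be loaded from its own cache file. If the flag mismatch were the only problem, a hedged recovery sketch would be to rebuild the cache under the same flags and then use the retry entry point the error message itself names:

    # Run in a session started with the same flags as the failing process
    # (here, with package images disabled), so the fresh cache matches.
    import Pkg
    Pkg.precompile("JLArrays")     # rebuild the stale cache for this flag set
    Base.retry_load_extensions()   # re-attempt loading NDTensorsJLArraysExt

Since the UnsafeAtomics cache failure points at this Julia development build rather than at stale files, the retry may well reproduce the error.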
svd example 1: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:293
  Got exception outside of a @test
  Scalar indexing is disallowed.
  Invocation of getindex resulted in scalar indexing of a GPU array.
  This is typically caused by calling an iterating implementation of a method.
  Such implementations *do not* execute on the GPU, but very slowly on the CPU,
  and therefore should be avoided.

  If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
  to enable scalar iteration globally or for the operations in question.
  Stacktrace:
    [1] error(s::String)
      @ Base ./error.jl:44
    [2] errorscalar(op::String)
      @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
    [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing)
      @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
    [4] assertscalar(op::String)
      @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
    [5] getindex
      @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
    [6] scalar_getindex
      @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
    [7] _getindex
      @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
    [8] getindex
      @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
    [9] chkuplofinite(A::JLArrays.JLArray{Float32, 2}, uplo::Char)
      @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:117
   [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float32, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64)
      @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:5393
   [11] #eigen!#254
      @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:28 [inlined]
   [12] eigen!
      @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:22 [inlined]
   [13] #_eigen#256
      @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:65 [inlined]
   [14] _eigen
      @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:63 [inlined]
   [15] #eigen#255
      @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:59 [inlined]
   [16] eigen
      @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:58 [inlined]
   [17] eigen
      @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined]
   [18] svd_recursive(M::JLArrays.JLArray{Float32, 2}; thresh::Float64, north_pass::Int64)
      @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:40
   [19] svd_recursive(M::JLArrays.JLArray{Float32, 2})
      @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:29
   [20] svd(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing)
      @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:108
   [21] svd
      @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:87 [inlined]
   [22] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing)
      @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:58
   [23] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}})
      @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:39
   [24] top-level scope
      @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:30
   [25] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [26] macro expansion
      @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:287 [inlined]
   [27] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
   [28] macro expansion
      @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:294 [inlined]
   [29] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [30] macro expansion
      @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:296 [inlined]
   [31] eval(m::Module, e::Any)
      @ Core ./boot.jl:489
   [32] top-level scope
      @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:1
   [33] include(mapexpr::Function, mod::Module, _path::String)
      @ Base ./Base.jl:310
   [34] top-level scope
      @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
   [35] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [36] macro expansion
      @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
   [37] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [38] macro expansion
      @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
   [39] macro expansion
      @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
   [40] macro expansion
      @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
   [41] eval(m::Module, e::Any)
      @ Core ./boot.jl:489
   [42] top-level scope
      @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
   [43] include(mapexpr::Function, mod::Module, _path::String)
      @ Base ./Base.jl:310
   [44] top-level scope
      @ none:6
   [45] eval(m::Module, e::Any)
      @ Core ./boot.jl:489
   [46] exec_options(opts::Base.JLOptions)
      @ Base ./client.jl:310
   [47] _start()
      @ Base ./client.jl:577
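Each svd failure in this file bottoms out in the same frames ([9] through [18] above): NDTensors' `svd_recursive` calls `eigen`, which for a Hermitian strided matrix dispatches to `LAPACK.syevr!`, and the LAPACK input check `chkuplofinite` reads the matrix one element at a time, which GPUArraysCore forbids for GPU-style arrays. A minimal sketch of that call path outside of NDTensors, assuming only JLArrays (the CPU-backed reference GPU array the test suite uses):

    using LinearAlgebra
    using JLArrays

    M = rand(Float32, 4, 4)
    H = Hermitian(JLArray(M * M'))   # Hermitian-wrapped GPU-style array

    # eigen on a Hermitian strided matrix reaches LAPACK.syevr!, whose
    # chkuplofinite check indexes the array element by element -- the
    # scalar indexing that GPUArraysCore disallows:
    eigen(H)   # expected to throw "Scalar indexing is disallowed"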
svd example 2: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:301
  Got exception outside of a @test
  Scalar indexing is disallowed.
  [stack trace identical to svd example 1 above, apart from the test line numbers]
svd example 3: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:309
  Got exception outside of a @test
  Scalar indexing is disallowed.
  [stack trace identical to svd example 1 above, apart from the test line numbers]
svd example 4: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:317
  Got exception outside of a @test
  Scalar indexing is disallowed.
  [stack trace identical to svd example 1 above, apart from the test line numbers]
svd example 5: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:325
  Got exception outside of a @test
  Scalar indexing is disallowed.
  [stack trace identical to svd example 1, except that svd_recursive first recurses through a LinearAlgebra.Transpose{Float32, JLArrays.JLArray{Float32, 2}} wrapper (svd.jl:29 and svd.jl:32) before reaching the same eigen and syevr! frames]
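All of the failures in this file are the same scalar-indexing error, and the error message itself offers `allowscalar`/`@allowscalar` as an escape hatch. That only unblocks debugging (the indexing then runs element by element on the CPU); the underlying fix is for the `svd_recursive`/`eigen` path to avoid LAPACK routines on GPU-style arrays. A sketch of the suggested workaround:

    using GPUArraysCore: @allowscalar, allowscalar
    using JLArrays

    a = JLArray(Float32[1 2; 3 4])

    # a[1, 1] throws by default on a GPU-style array; wrapping the call
    # permits scalar reads for just that expression:
    x = @allowscalar a[1, 1]

    # or, coarser, for everything that follows (debugging aid only):
    allowscalar(true)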
~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [49] _start() @ Base ./client.jl:577 svd example 1: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:293 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] chkuplofinite(A::JLArrays.JLArray{Float64, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:117 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float64, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:5393 [11] #eigen!#254 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:28 [inlined] [12] eigen! 
@ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:22 [inlined] [13] #_eigen#256 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:65 [inlined] [14] _eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:63 [inlined] [15] #eigen#255 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:59 [inlined] [16] eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:58 [inlined] [17] eigen @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined] [18] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:40 [19] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:29 [20] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:108 [21] svd @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:87 [inlined] [22] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:58 [23] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:39 [24] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:30 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:287 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:294 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:296 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:1 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [34] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [35] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [36] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [41] eval(m::Module, e::Any) @ Core ./boot.jl:489 [42] top-level scope @ 
~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [43] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [44] top-level scope @ none:6 [45] eval(m::Module, e::Any) @ Core ./boot.jl:489 [46] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [47] _start() @ Base ./client.jl:577 svd example 2: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:301 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] chkuplofinite(A::JLArrays.JLArray{Float64, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:117 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float64, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:5393 [11] #eigen!#254 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:28 [inlined] [12] eigen! 
@ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:22 [inlined] [13] #_eigen#256 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:65 [inlined] [14] _eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:63 [inlined] [15] #eigen#255 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:59 [inlined] [16] eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:58 [inlined] [17] eigen @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined] [18] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:40 [19] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:29 [20] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:108 [21] svd @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:87 [inlined] [22] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:58 [23] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:39 [24] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:30 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:287 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:302 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:304 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:1 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [34] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [35] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [36] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [41] eval(m::Module, e::Any) @ Core ./boot.jl:489 [42] top-level scope @ 
~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [43] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [44] top-level scope @ none:6
 [45] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [46] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [47] _start() @ Base ./client.jl:577
svd example 3: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:309
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] chkuplofinite(A::JLArrays.JLArray{Float64, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:117
 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float64, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:5393
 [11] #eigen!#254 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:28 [inlined]
 [12] eigen! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:22 [inlined]
 [13] #_eigen#256 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:65 [inlined]
 [14] _eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:63 [inlined]
 [15] #eigen#255 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:59 [inlined]
 [16] eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:58 [inlined]
 [17] eigen @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined]
 [18] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:40
 [19] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:29
 [20] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:108
 [21] svd @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:87 [inlined]
 [22] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:58
 [23] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:39
 [24] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:30
 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:287 [inlined]
 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:310 [inlined]
 [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:312 [inlined]
 [31] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:1
 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [34] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [35] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [36] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [38] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [41] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [42] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [43] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [44] top-level scope @ none:6
 [45] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [46] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [47] _start() @ Base ./client.jl:577
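Every failure in this section has the same shape: the test harness runs with scalar indexing disabled, so any code path that falls back to reading or writing a device array element by element throws instead of silently crawling on the CPU. Below is a minimal sketch of the mechanism and of the `@allowscalar` escape hatch that the error message itself suggests, assuming scalar indexing has been disabled with `GPUArraysCore.allowscalar(false)` as a GPU-safe test harness would (the variable names are illustrative):

    using JLArrays, GPUArraysCore

    GPUArraysCore.allowscalar(false)        # forbid element-wise host access to device arrays
    a = JLArray(rand(Float64, 4, 4))        # JLArray: a CPU-backed stand-in for a GPU array
    # a[1, 1]                               # scalar getindex: throws "Scalar indexing is disallowed"
    x = GPUArraysCore.@allowscalar a[1, 1]  # explicitly opt in for a single scalar read
    b = 2 .* a                              # broadcasting stays on the device; no scalar indexing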
svd example 4: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:317
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] chkuplofinite(A::JLArrays.JLArray{Float64, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:117
 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float64, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:5393
 [11] #eigen!#254 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:28 [inlined]
 [12] eigen! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:22 [inlined]
 [13] #_eigen#256 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:65 [inlined]
 [14] _eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:63 [inlined]
 [15] #eigen#255 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:59 [inlined]
 [16] eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:58 [inlined]
 [17] eigen @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined]
 [18] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:40
 [19] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:29
 [20] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:108
 [21] svd @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:87 [inlined]
 [22] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:58
 [23] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:39
 [24] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:30
 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:287 [inlined]
 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:318 [inlined]
 [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:320 [inlined]
 [31] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:1
 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [34] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [35] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [36] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [38] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [41] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [42] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [43] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [44] top-level scope @ none:6
 [45] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [46] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [47] _start() @ Base ./client.jl:577
svd example 5: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:325
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] chkuplofinite(A::JLArrays.JLArray{Float64, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:117
 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float64, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/lapack.jl:5393
 [11] #eigen!#254 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:28 [inlined]
 [12] eigen! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:22 [inlined]
 [13] #_eigen#256 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:65 [inlined]
 [14] _eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:63 [inlined]
 [15] #eigen#255 @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:59 [inlined]
 [16] eigen @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/symmetriceigen.jl:58 [inlined]
 [17] eigen @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined]
 [18] svd_recursive(M::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:40
 [19] svd_recursive(M::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:29
 [20] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:32
 [21] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/svd.jl:29
 [22] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:108
 [23] svd @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:87 [inlined]
 [24] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:58
 [25] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/linearalgebra.jl:39
 [26] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:30
 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:287 [inlined]
 [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:326 [inlined]
 [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [32] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:328 [inlined]
 [33] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [34] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl:1
 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [38] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [42] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [43] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [44] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [46] top-level scope @ none:6
 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [49] _start() @ Base ./client.jl:577
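The three svd failures above share frames [9]–[18]: `svd_recursive` calls `eigen` on the device matrix, LinearAlgebra's symmetric-eigen path hands it to LAPACK's `syevr!`, and the `chkuplofinite` input check reads the matrix one element at a time. A generic workaround sketch, assuming JLArrays and not reflecting NDTensors' own eventual fix: do the factorization on the host and copy the factors back.

    using JLArrays, LinearAlgebra

    Ah = rand(Float64, 8, 8)
    A = JLArray(Ah + Ah')           # symmetric matrix living on the device
    # eigen(Symmetric(A)) would reach LAPACK.syevr!, whose chkuplofinite check
    # scalar-indexes the device array and throws (frames [9]-[10] above).
    F = eigen(Symmetric(Array(A)))  # factorize on the host instead ...
    vals = JLArray(F.values)        # ... then move the eigenvalues ...
    vecs = JLArray(F.vectors)       # ... and eigenvectors back to the device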
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float64, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 2}, Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 3}, JLArrays.JLArray{Float64, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, is::Tuple{Vector{Int64}, Vector{Int64}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/blocksparsetensor.jl:601
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:96 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [38] _start() @ Base ./client.jl:577
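Here the scalar indexing happens inside `permutedims!`: `permutedims_combine` writes into a reshaped `SubArray` of a `JLArray` (frame [11]), and because that wrapper is not itself a device-array type, dispatch lands on Base's generic `PermutedDimsArrays` fallback, which iterates `src` element by element (frame [9]). A small sketch of the dispatch difference, assuming JLArrays; the wrapped destination below is illustrative, not NDTensors' exact code:

    using JLArrays

    src = JLArray(rand(Float64, 2, 3, 4))
    dest = JLArray(zeros(Float64, 3, 4, 2))
    permutedims!(dest, src, (2, 3, 1))     # both plain JLArrays: device-friendly method

    # Writing through a reshaped view hides the JLArray type from dispatch,
    # so Base's generic, scalar-indexing fallback would be selected instead:
    buf = JLArray(zeros(Float64, 4, 6))
    dest2 = reshape(view(buf, 1:4, 1:6), 3, 4, 2)
    # permutedims!(dest2, src, (2, 3, 1))  # throws while scalar indexing is disallowed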
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float64, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 2}, Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 3}, JLArrays.JLArray{Float64, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, is::Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/blocksparsetensor.jl:601
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:96 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [38] _start() @ Base ./client.jl:577
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float32, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 2}, Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 3}, JLArrays.JLArray{Float32, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, is::Tuple{Vector{Int64}, Vector{Int64}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/blocksparsetensor.jl:601
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:96 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [38] _start() @ Base ./client.jl:577
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float32, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 2}, Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 3}, JLArrays.JLArray{Float32, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, is::Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/blocksparsetensor.jl:601
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}, Main.var"##NDTensors#141".var"##143".Index{Vector{Pair{Main.var"##NDTensors#141".var"##143".QN, Int64}}}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:42 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:77 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:96 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [38] _start() @ Base ./client.jl:577
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_dense.jl
No permutation: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:238
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF64, 1}, JLArrays.JLArray{ComplexF64, 1}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:6
 [10] getindex @ ~/.julia/packages/NDTensors/wnM2t/src/dense/densetensor.jl:98 [inlined]
 [11] _contract_scalar_maybe_perm!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T₁::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT₁::Tuple{Int64, Int64, Int64}, T₂::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT₂::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:134
 [12] _contract_scalar! @ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:155 [inlined]
 [13] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:172
 [14] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:171
 [15] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:16
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:16 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:238 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:239 [inlined]
 [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [23] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:244 [inlined]
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:1
 [26] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [27] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [32] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [33] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [35] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [36] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [37] top-level scope @ none:6
 [38] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [39] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [40] _start() @ Base ./client.jl:577
Permutation: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:249
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF64, 1}, JLArrays.JLArray{ComplexF64, 1}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:6
 [10] getindex @ ~/.julia/packages/NDTensors/wnM2t/src/dense/densetensor.jl:98 [inlined]
 [11] _contract_scalar_maybe_perm!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T₁::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT₁::Tuple{Int64, Int64, Int64}, T₂::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT₂::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:134
 [12] _contract_scalar! @ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:155 [inlined]
 [13] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:172
 [14] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:171
 [15] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:16
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:16 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:238 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:250 [inlined]
 [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [23] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:255 [inlined]
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_dense.jl:1
 [26] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [27] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [32] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [33] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [35] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
 [36] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [37] top-level scope @ none:6
 [38] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [39] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
 [40] _start() @ Base ./client.jl:577
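In these two dense-tensor failures the contraction never reaches a matrix multiply: one operand is scalar-like, so `_contract_scalar_maybe_perm!` reads that single element straight off the device array (frames [9]–[11] above) and throws. When no operand is scalar-like, dense contraction instead lowers to `mul!`, which has its own hazard, visible in the test_diag trace below: once an operand is buried under host-side lazy wrappers (a `Transpose` of a `ReshapedArray` of an `Adjoint`), `mul!` falls back to LinearAlgebra's generic, scalar-indexing loop. A sketch of that dispatch cliff, assuming JLArrays; the wrapper stack below is illustrative:

    using JLArrays, LinearAlgebra

    A = JLArray(rand(Float32, 4, 8))
    B = JLArray(rand(Float32, 8, 2))
    C = JLArray(zeros(Float32, 4, 2))
    mul!(C, A, B)      # plain device arrays: dispatches to the device matmul path, fine

    # Burying the device array under lazy wrappers hides it from that dispatch:
    Bw = transpose(reshape(JLArray(rand(Float32, 2, 8))', 2, 8))  # 8×2, deeply wrapped
    # mul!(C, A, Bw)   # would select the generic fallback and scalar-index the operand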
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl
test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67
Test threw exception
Expression: sqrt((contract(D, (-1, -2), conj(D), (-1, -2)))[]) ≈ norm(D)
Scalar indexing is disallowed.
Invocation of setindex! resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:58 [inlined]
 [6] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/tensorstorage/tensorstorage.jl:30 [inlined]
 [7] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/diagtensor.jl:103 [inlined]
 [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 1}, NDTensors.DiagTensor{Float32, 0, Tuple{}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}}, x::Float32) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:9
 [9] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:96 [inlined]
 [10] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:73 [inlined]
 [11] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:71 [inlined]
 [12] _contract!!(output_tensor::NDTensors.DiagTensor{Float32, 0, Tuple{}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143
 [13] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined]
 [14] contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [inlined]
 [15] contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined]
 [16] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113
 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}}}, tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91
 [18] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332
 [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined]
 [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [23] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67 [inlined]
 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:750 [inlined]
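The failing expression itself shows the pattern: `contract(D, (-1, -2), conj(D), (-1, -2))` produces a zero-dimensional tensor, the test reads it with `[]`, and the contraction writes its single output element with a plain `setindex!` (frames [5]–[8] above), both of which are scalar accesses on the device array. A sketch of both accesses and their GPU-safe forms, assuming JLArrays; the values are illustrative:

    using JLArrays, GPUArraysCore

    s = JLArray(fill(2.0f0))                # zero-dimensional device array
    # s[] = 4.0f0                           # scalar setindex!: throws when disallowed
    # s[]                                   # scalar getindex: likewise
    GPUArraysCore.@allowscalar s[] = 4.0f0  # opt in for the one-element write ...
    val = only(Array(s))                    # ... or copy to the host before reading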
test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU,
and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
 [9] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined]
 [10] _unsafe_getindex_rs @ ./reshapedarray.jl:318 [inlined]
 [11] _unsafe_getindex @ ./reshapedarray.jl:315 [inlined]
 [12] getindex @ ./reshapedarray.jl:303 [inlined]
 [13] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined]
 [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/generic.jl:0 [inlined]
 [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{Float32, 2}, A::JLArrays.JLArray{Float32, 2}, B::LinearAlgebra.Transpose{Float32, Base.ReshapedArray{Float32, 2, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float32, beta::Float32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1139
 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1131 [inlined]
 [17] _generic_matmatmul!(C::JLArrays.JLArray{Float32, 2}, A::JLArrays.JLArray{Float32, 2}, B::LinearAlgebra.Transpose{Float32, Base.ReshapedArray{Float32, 2, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float32, beta::Float32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1125
 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1115 [inlined]
 [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:348 [inlined]
 [20] _mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:333 [inlined]
 [21] mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:302 [inlined]
 [22] mul! @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/mul.jl:2 [inlined]
 [23] mul!!(CM::JLArrays.JLArray{Float32, 2}, AM::JLArrays.JLArray{Float32, 2}, BM::LinearAlgebra.Transpose{Float32, Base.ReshapedArray{Float32, 2, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::Float32, β::Float32) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/mul.jl:2
 [24] _contract!(CT::JLArrays.JLArray{Float32, 3}, AT::JLArrays.JLArray{Float32, 2}, BT::Base.ReshapedArray{Float32, 3, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/tensoralgebra/contract.jl:174
 [25] _contract! @ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:230 [inlined]
 [26] contract!(R::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:213
 [27] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:165 [inlined]
 [28] #contract!#1 @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:21 [inlined]
┌[29] contract!
│  @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:7 [inlined]
╰──── repeated 2 times
 [31] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [inlined]
 [32] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined]
 [33] contract!!(output_tensor::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}, tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219
 [34] contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined]
 [35] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113
 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91
 [37] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332
 [38] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25
 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined]
 [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [42] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:98 [inlined]
 [43] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [44] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:1
 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [48] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
 [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
 [50] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
 [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
 [52] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
 [53] eval(m::Module, e::Any) @ Core ./boot.jl:489
[54] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [59] _start() @ Base ./client.jl:577 test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67 Test threw exception Expression: sqrt((contract(D, (-1, -2), conj(D), (-1, -2)))[]) ≈ norm(D) Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:58 [inlined] [6] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/tensorstorage/tensorstorage.jl:30 [inlined] [7] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/diagtensor.jl:103 [inlined] [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF32, 1}, NDTensors.DiagTensor{ComplexF32, 0, Tuple{}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}}, x::ComplexF32) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:9 [9] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:96 [inlined] [10] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:73 [inlined] [11] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:71 [inlined] [12] _contract!!(output_tensor::NDTensors.DiagTensor{ComplexF32, 0, Tuple{}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [13] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [14] contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [15] contract!! 
@ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [16] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}}}, tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91 [18] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67 [inlined] [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:750 [inlined] test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
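
The `allowscalar` / `@allowscalar` escape hatch named in this message comes from GPUArraysCore. A minimal sketch of the macro form, using the JLArrays test backend that appears throughout these traces (illustrative only, not a fix for the underlying contraction path):

    using GPUArraysCore: @allowscalar
    using JLArrays: JLArray

    a = JLArray(rand(Float32, 4))
    # In a non-interactive session (as under PkgEval), `a[1]` on its own
    # throws the "Scalar indexing is disallowed" error shown above.
    x = @allowscalar a[1]  # opt in for just this expression

The expression-scoped form is generally preferable to enabling scalar iteration globally, since the assertion stays active everywhere else.
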
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _unsafe_getindex_rs @ ./reshapedarray.jl:318 [inlined] [11] _unsafe_getindex @ ./reshapedarray.jl:315 [inlined] [12] getindex @ ./reshapedarray.jl:303 [inlined] [13] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined] [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/generic.jl:0 [inlined] [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{ComplexF32, 2}, A::JLArrays.JLArray{ComplexF32, 2}, B::LinearAlgebra.Transpose{ComplexF32, Base.ReshapedArray{ComplexF32, 2, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF32, beta::ComplexF32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1139 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1131 [inlined] [17] _generic_matmatmul!(C::JLArrays.JLArray{ComplexF32, 2}, A::JLArrays.JLArray{ComplexF32, 2}, B::LinearAlgebra.Transpose{ComplexF32, Base.ReshapedArray{ComplexF32, 2, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF32, beta::ComplexF32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1125 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1115 [inlined] [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:348 [inlined] [20] _mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:333 [inlined] [21] mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:302 [inlined] [22] mul! @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/mul.jl:2 [inlined] [23] mul!!(CM::JLArrays.JLArray{ComplexF32, 2}, AM::JLArrays.JLArray{ComplexF32, 2}, BM::LinearAlgebra.Transpose{ComplexF32, Base.ReshapedArray{ComplexF32, 2, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::ComplexF32, β::ComplexF32) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/mul.jl:2 [24] _contract!(CT::JLArrays.JLArray{ComplexF32, 3}, AT::JLArrays.JLArray{ComplexF32, 2}, BT::Base.ReshapedArray{ComplexF32, 3, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/tensoralgebra/contract.jl:174 [25] _contract! 
@ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:230 [inlined] [26] contract!(R::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:213 [27] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] [28] #contract!#1 @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:21 [inlined] ┌[29] contract! │ @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:7 [inlined] ╰──── repeated 2 times [31] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [32] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [33] contract!!(output_tensor::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}, tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [34] contract!! 
@ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [35] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91 [37] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [38] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:98 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:1 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [48] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [50] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [52] macro expansion @ 
~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [53] eval(m::Module, e::Any) @ Core ./boot.jl:489 [54] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [59] _start() @ Base ./client.jl:577 test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67 Test threw exception Expression: sqrt((contract(D, (-1, -2), conj(D), (-1, -2)))[]) ≈ norm(D) Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:58 [inlined] [6] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/tensorstorage/tensorstorage.jl:30 [inlined] [7] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/diagtensor.jl:103 [inlined] [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, NDTensors.DiagTensor{Float64, 0, Tuple{}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}}, x::Float64) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:9 [9] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:96 [inlined] [10] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:73 [inlined] [11] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:71 [inlined] [12] _contract!!(output_tensor::NDTensors.DiagTensor{Float64, 0, Tuple{}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [13] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [14] contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [15] contract!! 
@ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [16] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}}}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91 [18] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67 [inlined] [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:750 [inlined] test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
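
Frames [9]-[24] of the trace that follows show why the generic CPU kernel is reached at all: the contraction reshapes an `Adjoint`-wrapped `JLArray`, and `Base.reshape` on such a wrapper returns a `Base.ReshapedArray`, which hides the device array type from `mul!` dispatch. A sketch of the type-hiding (shapes are arbitrary, chosen only to make the reshape legal):

    using JLArrays: JLArray
    using LinearAlgebra

    A = JLArray(rand(Float32, 4, 6))
    B = reshape(A', 3, 8)  # Base.ReshapedArray wrapping an Adjoint
    B isa JLArray          # false: the device array type is buried, so
                           # mul! on transpose(B) misses the GPU methods and
                           # falls back to LinearAlgebra's element-wise kernel

One possible route is materializing the adjoint before reshaping (e.g. with `copy`), so the reshape applies to a plain device array again; whether that is acceptable here is a question for the package's contraction code.
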
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _unsafe_getindex_rs @ ./reshapedarray.jl:318 [inlined] [11] _unsafe_getindex @ ./reshapedarray.jl:315 [inlined] [12] getindex @ ./reshapedarray.jl:303 [inlined] [13] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined] [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/generic.jl:0 [inlined] [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{Float64, 2}, A::JLArrays.JLArray{Float64, 2}, B::LinearAlgebra.Transpose{Float64, Base.ReshapedArray{Float64, 2, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1139 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1131 [inlined] [17] _generic_matmatmul!(C::JLArrays.JLArray{Float64, 2}, A::JLArrays.JLArray{Float64, 2}, B::LinearAlgebra.Transpose{Float64, Base.ReshapedArray{Float64, 2, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1125 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1115 [inlined] [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:348 [inlined] [20] _mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:333 [inlined] [21] mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:302 [inlined] [22] mul! @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/mul.jl:2 [inlined] [23] mul!!(CM::JLArrays.JLArray{Float64, 2}, AM::JLArrays.JLArray{Float64, 2}, BM::LinearAlgebra.Transpose{Float64, Base.ReshapedArray{Float64, 2, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/mul.jl:2 [24] _contract!(CT::JLArrays.JLArray{Float64, 3}, AT::JLArrays.JLArray{Float64, 2}, BT::Base.ReshapedArray{Float64, 3, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/tensoralgebra/contract.jl:174 [25] _contract! 
@ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:230 [inlined] [26] contract!(R::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:213 [27] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] [28] #contract!#1 @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:21 [inlined] ┌[29] contract! │ @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:7 [inlined] ╰──── repeated 2 times [31] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [32] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [33] contract!!(output_tensor::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [34] contract!! 
@ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [35] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91 [37] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [38] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:98 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:1 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [48] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [50] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [52] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [53] eval(m::Module, e::Any) @ Core ./boot.jl:489 
[54] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [59] _start() @ Base ./client.jl:577 test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67 Test threw exception Expression: sqrt((contract(D, (-1, -2), conj(D), (-1, -2)))[]) ≈ norm(D) Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:58 [inlined] [6] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/tensorstorage/tensorstorage.jl:30 [inlined] [7] setindex! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/diagtensor.jl:103 [inlined] [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF64, 1}, NDTensors.DiagTensor{ComplexF64, 0, Tuple{}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}}, x::ComplexF64) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:9 [9] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:96 [inlined] [10] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:73 [inlined] [11] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/diag/tensoralgebra/contract.jl:71 [inlined] [12] _contract!!(output_tensor::NDTensors.DiagTensor{ComplexF64, 0, Tuple{}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [13] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [14] contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [15] contract!! 
@ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [16] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}}}, tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91 [18] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:67 [inlined] [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:750 [inlined] test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
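
The companion failures at test_diag.jl:67 hit the same assertion through `setindex!` instead: the diag-diag contraction accumulates its scalar result into a zero-dimensional device tensor one element at a time (frames [5]-[8] of those traces). The zero-dimensional write can be reproduced in isolation like so (JLArray used purely for illustration):

    using GPUArraysCore: @allowscalar
    using JLArrays: JLArray

    out = reshape(JLArray(zeros(Float64, 1)), ())  # 0-dimensional device array
    # `out[] = 1.0` is a scalar setindex! and trips the same assertion;
    # wrapping the single write makes the intent explicit:
    @allowscalar out[] = 1.0
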
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _unsafe_getindex_rs @ ./reshapedarray.jl:318 [inlined] [11] _unsafe_getindex @ ./reshapedarray.jl:315 [inlined] [12] getindex @ ./reshapedarray.jl:303 [inlined] [13] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined] [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/generic.jl:0 [inlined] [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{ComplexF64, 2}, A::JLArrays.JLArray{ComplexF64, 2}, B::LinearAlgebra.Transpose{ComplexF64, Base.ReshapedArray{ComplexF64, 2, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF64, beta::ComplexF64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1139 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1131 [inlined] [17] _generic_matmatmul!(C::JLArrays.JLArray{ComplexF64, 2}, A::JLArrays.JLArray{ComplexF64, 2}, B::LinearAlgebra.Transpose{ComplexF64, Base.ReshapedArray{ComplexF64, 2, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF64, beta::ComplexF64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1125 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1115 [inlined] [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:348 [inlined] [20] _mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:333 [inlined] [21] mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:302 [inlined] [22] mul! @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/mul.jl:2 [inlined] [23] mul!!(CM::JLArrays.JLArray{ComplexF64, 2}, AM::JLArrays.JLArray{ComplexF64, 2}, BM::LinearAlgebra.Transpose{ComplexF64, Base.ReshapedArray{ComplexF64, 2, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/mul.jl:2 [24] _contract!(CT::JLArrays.JLArray{ComplexF64, 3}, AT::JLArrays.JLArray{ComplexF64, 2}, BT::Base.ReshapedArray{ComplexF64, 3, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/tensoralgebra/contract.jl:174 [25] _contract! 
@ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:230 [inlined] [26] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:213 [27] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] [28] #contract!#1 @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:21 [inlined] ┌[29] contract! │ @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:7 [inlined] ╰──── repeated 2 times [31] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [32] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [33] contract!!(output_tensor::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}, tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [34] contract!! 
@ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [35] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91 [37] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [38] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:25 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:98 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:1 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [48] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [50] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [52] macro expansion @ 
~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [53] eval(m::Module, e::Any) @ Core ./boot.jl:489 [54] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [59] _start() @ Base ./client.jl:577

DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:116
Test threw exception
Expression: contract(A, (-2, 1), t, (-2, 3)) == transpose(A)
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _generic_matmatmul_generic!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, B::JLArrays.JLArray{Float64, 2}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1170 [11] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1130 [inlined] [12] _generic_matmatmul!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, B::JLArrays.JLArray{Float64, 2}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1125 [13] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1115 [inlined] [14] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:348 [inlined] [15] _mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:333 [inlined] [16] mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:302 [inlined] [17] mul!
@ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/mul.jl:2 [inlined] [18] mul!!(CM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, AM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, BM::JLArrays.JLArray{Float64, 2}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/mul.jl:2 [19] _contract!(CT::JLArrays.JLArray{Float64, 2}, AT::JLArrays.JLArray{Float64, 2}, BT::JLArrays.JLArray{Float64, 2}, props::NDTensors.ContractionProperties{2, 2, 2}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/tensoralgebra/contract.jl:174 [20] _contract! @ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:230 [inlined] [21] contract!(R::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsR::Tuple{Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:213 [22] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] [23] #contract!#1 @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:21 [inlined] [24] contract! @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:7 [inlined] ┌[25] contract! │ @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:37 [inlined] ╰──── repeated 2 times [27] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [28] _contract!! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [29] contract!!(output_tensor::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsoutput_tensor::Tuple{Int64, Int64}, tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:219 [30] contract!! 
@ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [31] contract(tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:113 [32] contract(::Type{NDTensors.CanContract{NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}}}, tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:91 [33] contract(tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [34] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:750 [inlined] [35] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:116 [inlined] [36] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [37] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:2066

DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:120
Test threw exception
Expression: ≈((contract(t, (-1, -2), A, (-1, -2)))[], dot(dev(array(t)), array(A)), rtol = sqrt(eps(elt)))
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
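
Here the failure is on the test side rather than in a kernel: `(contract(...))[]` reads the zero-dimensional result with scalar `getindex` (frames [9]-[10] below). For a test, copying the 0-d result to the host first avoids the assertion; a sketch with a stand-in value:

    using GPUArraysCore: @allowscalar
    using JLArrays: JLArray

    s = reshape(JLArray([2.5]), ())  # stand-in for the 0-d contraction result
    val = Array(s)[]                 # copy device-to-host, then a plain read
    # val = @allowscalar s[]         # or: permit the one scalar device read
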
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, JLArrays.JLArray{Float64, 1}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:6 [10] getindex(T::NDTensors.DenseTensor{Float64, 0, Tuple{}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/densetensor.jl:98 [11] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:750 [inlined] [12] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:120 [inlined] [13] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [14] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:2066 DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:125 Test threw exception Expression: ≈(dot(t, A), dot(dev(array(t)), array(A)), rtol = sqrt(eps(elt))) Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
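The hint at the end of the error message is actionable: GPUArraysCore lets a caller opt back in to scalar indexing, either globally or for a single expression. A minimal sketch of both forms (assuming GPUArraysCore and JLArrays, the packages involved above; the one-element array is illustrative only):

    using GPUArraysCore, JLArrays

    GPUArraysCore.allowscalar(false)     # the mode these tests run under
    v = JLArray([1.0])
    # v[1]                               # would throw "Scalar indexing is disallowed"
    GPUArraysCore.@allowscalar v[1]      # opt in for this one expression; returns 1.0
    GPUArraysCore.allowscalar(true)      # or opt in globally (slow on a real GPU)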
DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:125
Test threw exception
Expression: ≈(dot(t, A), dot(dev(array(t)), array(A)), rtol = sqrt(eps(elt)))
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
[1] error(s::String) @ Base ./error.jl:44
[2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
[3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
[4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
[5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
[6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
[7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
[8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
[9] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, JLArrays.JLArray{Float64, 1}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:6
[10] getindex(T::NDTensors.DenseTensor{Float64, 0, Tuple{}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/densetensor.jl:98
[11] dot(x::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, y::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:28
[12] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:750 [inlined]
[13] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:125 [inlined]
[14] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
[15] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diag.jl:2066
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diagblocksparse.jl
DiagBlockSparse contract: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diagblocksparse.jl:60
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
[1] error(s::String) @ Base ./error.jl:44
[2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
[3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
[4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
[5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
[6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
[7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
[8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
[9] getindex @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/adjtrans.jl:348 [inlined]
[10] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/generic.jl:0 [inlined]
[11] _generic_matmatmul_nonadjtrans!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::JLArrays.JLArray{Float64, 2}, B::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1139
[12] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1131 [inlined]
[13] _generic_matmatmul!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::JLArrays.JLArray{Float64, 2}, B::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1125
[14] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:1115 [inlined]
[15] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:348 [inlined]
[16] _mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:333 [inlined]
[17] mul! @ /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/matmul.jl:302 [inlined]
[18] mul! @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/mul.jl:2 [inlined]
[19] mul!!(CM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, AM::JLArrays.JLArray{Float64, 2}, BM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/mul.jl:2
[20] _contract!(CT::JLArrays.JLArray{Float64, 2}, AT::JLArrays.JLArray{Float64, 2}, BT::JLArrays.JLArray{Float64, 2}, props::NDTensors.ContractionProperties{2, 2, 2}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/abstractarray/tensoralgebra/contract.jl:174
[21] _contract! @ ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:230 [inlined]
[22] contract!(R::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsR::Tuple{Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/dense/tensoralgebra/contract.jl:213
[23] contract! @ ~/.julia/packages/NDTensors/wnM2t/src/tensoroperations/generic_tensor_operations.jl:165 [inlined]
[24] contract!(output_tensor::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}}, labelsoutput_tensor::Tuple{Int64, Int64}, tensor1::NDTensors.Expose.Exposed{Float64, NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}}, labelstensor2::Tuple{Int64, Int64}, α::Float64, β::Float64) @ NDTensorsGPUArraysCoreExt ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:61
[25] contract! @ ~/.julia/packages/NDTensors/wnM2t/ext/NDTensorsGPUArraysCoreExt/contract.jl:77 [inlined]
[26] contract!(R::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsR::Tuple{Int64, Int64}, T1::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DiagBlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.DiagBlockSparse{Float64, Float64, 2}}, labelsT2::Tuple{Int64, Int64}, contraction_plan::Vector{Tuple{NDTensors.Block{2}, NDTensors.Block{2}, NDTensors.Block{2}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/diagblocksparse.jl:671
[27] contract(T1::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DiagBlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.DiagBlockSparse{Float64, Float64, 2}}, labelsT2::Tuple{Int64, Int64}, labelsR::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/diagblocksparse.jl:621
[28] contract(T1::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DiagBlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.DiagBlockSparse{Float64, Float64, 2}}, labelsT2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/blocksparse/diagblocksparse.jl:620
[29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_diagblocksparse.jl:67 [inlined]
[30] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
[31] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diagblocksparse.jl:2066
[32] eval(m::Module, e::Any) @ Core ./boot.jl:489
[33] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_diagblocksparse.jl:1
[34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
[35] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
[36] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
[37] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
[38] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
[39] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
[40] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
[41] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
[42] eval(m::Module, e::Any) @ Core ./boot.jl:489
[43] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
[44] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
[45] top-level scope @ none:6
[46] eval(m::Module, e::Any) @ Core ./boot.jl:489
[47] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
[48] _start() @ Base ./client.jl:577
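Frames [9]-[19] of this trace show the mechanism: NDTensors' mul!! forwards a five-argument mul! whose destination and second operand are Transpose-wrapped JLArrays, dispatch misses the GPU-aware matmul path, and LinearAlgebra's generic kernel reads elements one at a time. A minimal sketch of that call shape (assuming JLArrays with scalar indexing disallowed, as in these tests; sizes are illustrative):

    using LinearAlgebra, JLArrays, GPUArraysCore

    GPUArraysCore.allowscalar(false)
    A = JLArray(rand(4, 4))
    B = JLArray(rand(4, 4))
    C = JLArray(zeros(4, 4))
    # the call shape from frame [19]: Transpose-wrapped destination and operand
    mul!(transpose(C), A, transpose(B), 1.0, 0.0)   # per the trace above, ends in scalar getindex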
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_emptynumber.jl
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_emptystorage.jl
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl
Dense qr decomposition, elt=Float64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}})
The type `JLArrays.JLArray{Float64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it.
Closest candidates are:
JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:109
JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:92
JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:139
...
Stacktrace:
[1] convert(::Type{JLArrays.JLArray{Float64, 2}}, Q::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/abstractq.jl:52
[2] qx(qx::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:353
[3] qr(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:330
[4] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:328
[5] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
[6] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
[7] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
[8] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
[9] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
[10] eval(m::Module, e::Any) @ Core ./boot.jl:489
[11] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
[12] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
[13] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
[14] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
[15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
[16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
[17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
[18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
[19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
[20] eval(m::Module, e::Any) @ Core ./boot.jl:489
[21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
[22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
[23] top-level scope @ none:6
[24] eval(m::Module, e::Any) @ Core ./boot.jl:489
[25] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
[26] _start() @ Base ./client.jl:577
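Frames [1]-[2] show where this comes from: NDTensors' qx runs qr on the JLArray-backed tensor and then asks for the resulting QRCompactWYQ Q-factor back as a plain JLArray, a conversion for which this Julia build defines no constructor. A minimal sketch of the failing step in isolation (assuming JLArrays; the 4×4 input is illustrative):

    using LinearAlgebra, JLArrays

    A = JLArray(rand(Float64, 4, 4))
    F = qr(A)                           # F.Q isa LinearAlgebra.QRCompactWYQ, not a plain array
    JLArrays.JLArray{Float64, 2}(F.Q)   # MethodError, as in frame [1] above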
Dense qr decomposition, elt=Float64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}})
(closest candidates and stack trace identical to the singular=false failure above)
Dense qr decomposition, elt=Float64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}})
The type `JLArrays.JLArray{Float64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it.
(closest candidates as above)
Stacktrace:
[1] convert(::Type{JLArrays.JLArray{Float64, 2}}, Q::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/abstractq.jl:52
[2] qr_positive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:383
[3] qr_positive(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:11
[4] qx(qx::typeof(NDTensors.Expose.qr_positive), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344
[5] qr(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:330
(frames [6]-[28] identical to frames [4]-[26] of the positve=false trace above)
Dense qr decomposition, elt=Float64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}})
(closest candidates and stack trace identical to the positve=true failure above)
Dense qr decomposition, elt=ComplexF64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{ComplexF64, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=false failure, with ComplexF64 in place of Float64)
Dense qr decomposition, elt=ComplexF64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{ComplexF64, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=false failure, with ComplexF64 in place of Float64)
Dense qr decomposition, elt=ComplexF64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{ComplexF64, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=true failure, via the qr_positive frames, with ComplexF64 in place of Float64)
Dense qr decomposition, elt=ComplexF64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{ComplexF64, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=true failure, via the qr_positive frames, with ComplexF64 in place of Float64)
Dense qr decomposition, elt=Float32, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float32, 2}(::LinearAlgebra.QRCompactWYQ{Float32, JLArrays.JLArray{Float32, 2}, JLArrays.JLArray{Float32, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=false failure, with Float32 in place of Float64)
Dense qr decomposition, elt=Float32, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float32, 2}(::LinearAlgebra.QRCompactWYQ{Float32, JLArrays.JLArray{Float32, 2}, JLArrays.JLArray{Float32, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=false failure, with Float32 in place of Float64)
Dense qr decomposition, elt=Float32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float32, 2}(::LinearAlgebra.QRCompactWYQ{Float32, JLArrays.JLArray{Float32, 2}, JLArrays.JLArray{Float32, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=true failure, via the qr_positive frames, with Float32 in place of Float64)
Dense qr decomposition, elt=Float32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{Float32, 2}(::LinearAlgebra.QRCompactWYQ{Float32, JLArrays.JLArray{Float32, 2}, JLArrays.JLArray{Float32, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=true failure, via the qr_positive frames, with Float32 in place of Float64)
Dense qr decomposition, elt=ComplexF32, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{ComplexF32, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF32, JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}})
(closest candidates and stack trace as in the elt=Float64, positve=false failure, with ComplexF32 in place of Float64)
Dense qr decomposition, elt=ComplexF32, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
MethodError: no method matching JLArrays.JLArray{ComplexF32, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF32, JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}})
The type `JLArrays.JLArray{ComplexF32, 2}` exists, but no method is defined for this combination of argument types when trying to construct it.
Closest candidates are:
JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:109
JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:92
JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:139
...
Stacktrace:
[1] convert(::Type{JLArrays.JLArray{ComplexF32, 2}}, Q::LinearAlgebra.QRCompactWYQ{ComplexF32, JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/abstractq.jl:52
[2] qx(qx::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:353
[3] qr(T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:330
[4] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:328
[5] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
[6] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
[7] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
[8] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
[9] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
[10] eval(m::Module, e::Any) @ Core ./boot.jl:489
[11] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
[12] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
[13] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
[14] macro
expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [23] top-level scope @ none:6 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489 [25] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [26] _start() @ Base ./client.jl:577 Dense qr decomposition, elt=ComplexF32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 Got exception outside of a @test MethodError: no method matching JLArrays.JLArray{ComplexF32, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF32, JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}}) The type `JLArrays.JLArray{ComplexF32, 2}` exists, but no method is defined for this combination of argument types when trying to construct it. Closest candidates are: JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:109 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:92 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:139 ... 
Stacktrace: [1] convert(::Type{JLArrays.JLArray{ComplexF32, 2}}, Q::LinearAlgebra.QRCompactWYQ{ComplexF32, JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/abstractq.jl:52 [2] qr_positive(M::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:383 [3] qr_positive(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:11 [4] qx(qx::typeof(NDTensors.Expose.qr_positive), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [5] qr(T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:330 [6] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:328 [7] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined] [8] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [9] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined] [10] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [11] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [12] eval(m::Module, e::Any) @ Core ./boot.jl:489 [13] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1 [14] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [15] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [22] eval(m::Module, e::Any) @ Core ./boot.jl:489 [23] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [25] top-level scope @ none:6 [26] eval(m::Module, e::Any) @ Core ./boot.jl:489 [27] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [28] _start() @ Base ./client.jl:577 Dense qr decomposition, elt=ComplexF32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 Got exception outside of a @test MethodError: no method matching JLArrays.JLArray{ComplexF32, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF32, JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}}) The type `JLArrays.JLArray{ComplexF32, 2}` exists, but no method is defined for this combination of argument 
types when trying to construct it. Closest candidates are: JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:109 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:92 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/XrlLf/src/JLArrays.jl:139 ... Stacktrace: [1] convert(::Type{JLArrays.JLArray{ComplexF32, 2}}, Q::LinearAlgebra.QRCompactWYQ{ComplexF32, JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.14/LinearAlgebra/src/abstractq.jl:52 [2] qr_positive(M::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:383 [3] qr_positive(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF32, 2}, JLArrays.JLArray{ComplexF32, 2}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:11 [4] qx(qx::typeof(NDTensors.Expose.qr_positive), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [5] qr(T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:330 [6] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:328 [7] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined] [8] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [9] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined] [10] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [11] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [12] eval(m::Module, e::Any) @ Core ./boot.jl:489 [13] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1 [14] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [15] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [22] eval(m::Module, e::Any) @ Core ./boot.jl:489 [23] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [25] top-level scope @ none:6 [26] eval(m::Module, e::Any) @ Core ./boot.jl:489 [27] exec_options(opts::Base.JLOptions) 
@ Base ./client.jl:310 [28] _start() @ Base ./client.jl:577 Dense ql decomposition, elt=Float64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [27] 
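[Editor's note] All of the qr failures above share one root cause, visible in frame [1] of every trace: since the AbstractQ rework in LinearAlgebra, convert(::Type{<:AbstractMatrix}, Q) (abstractq.jl:52) forwards to the destination array type's constructor, and JLArrays defines no constructor accepting an AbstractQ. The same pattern works on CPU arrays only because LinearAlgebra itself supplies Matrix{T}(::AbstractQ). A minimal sketch of the pattern, assuming only LinearAlgebra and JLArrays (illustrative; not NDTensors' actual code):

    using LinearAlgebra, JLArrays

    A = rand(Float32, 4, 3)
    F = qr(A)                  # F.Q isa LinearAlgebra.QRCompactWYQ
    Q = Matrix{Float32}(F.Q)   # OK on CPU: LinearAlgebra defines Matrix{T}(::AbstractQ)

    Ajl = JLArray(A)
    Fjl = qr(Ajl)              # succeeds, per frames [2]/[1] of the traces above
    # convert(JLArrays.JLArray{Float32, 2}, Fjl.Q)  # MethodError, as in the log:
    # JLArrays.JLArray{Float32, 2}(::QRCompactWYQ) is not defined.
    # One possible device-generic direction (an assumption, untested here) is to
    # materialize Q by multiplication rather than conversion, though this may in
    # turn hit other missing GPU methods:
    Qjl = Fjl.Q * JLArray(Matrix{Float32}(I, 4, 4))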
Dense ql decomposition, elt=Float64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined]
  [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344
  [13] ql(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [20] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [25] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [30] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [31] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [33] top-level scope @ none:6
  [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [36] _start() @ Base ./client.jl:577

Dense ql decomposition, elt=Float64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined]
  [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344
  [13] ql(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [20] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [25] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [30] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [31] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [33] top-level scope @ none:6
  [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [36] _start() @ Base ./client.jl:577

Dense ql decomposition, elt=Float64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql_positive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:403
  [12] ql_positive @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined]
  [13] qx @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [inlined]
  [14] ql(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [16] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [18] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [20] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [21] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [22] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [31] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [34] top-level scope @ none:6
  [35] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [37] _start() @ Base ./client.jl:577

Dense ql decomposition, elt=Float64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql_positive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:403
  [12] ql_positive @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined]
  [13] qx @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [inlined]
  [14] ql(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [16] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [18] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [20] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [21] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [22] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [31] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [34] top-level scope @ none:6
  [35] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [37] _start() @ Base ./client.jl:577
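[Editor's note] Every ql failure in this run dies in frame [9]: ql! at linearalgebra.jl:449 reads matrix elements one at a time, and the test environment runs GPU-style arrays with scalar indexing disallowed, exactly as the error text explains. A minimal sketch of the guard being tripped and the opt-in the message suggests, assuming the JLArrays/GPUArraysCore versions from the manifest above:

    using JLArrays, GPUArraysCore

    GPUArraysCore.allowscalar(false)          # strict mode, as in CI
    a = JLArray(ones(Float64, 3, 3))
    # a[1, 1]                                 # would throw "Scalar indexing is disallowed."
    x = GPUArraysCore.@allowscalar a[1, 1]    # scoped opt-in for one expression

Note that @allowscalar would only silence these tests; the error text itself warns that such element loops run very slowly on the CPU, so the durable fix is a ql! that avoids per-element getindex on device arrays (or an explicit CPU round-trip).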
Dense ql decomposition, elt=ComplexF64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined]
  [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344
  [13] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [20] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [25] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [30] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [31] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [33] top-level scope @ none:6
  [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [36] _start() @ Base ./client.jl:577

Dense ql decomposition, elt=ComplexF64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined]
  [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344
  [13] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [20] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [25] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [30] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [31] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [33] top-level scope @ none:6
  [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [36] _start() @ Base ./client.jl:577

Dense ql decomposition, elt=ComplexF64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql_positive(M::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:403
  [12] ql_positive @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined]
  [13] qx @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [inlined]
  [14] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [16] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [18] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [20] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [21] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [22] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [31] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [34] top-level scope @ none:6
  [35] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [37] _start() @ Base ./client.jl:577

Dense ql decomposition, elt=ComplexF64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
  [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
  [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
  [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
  [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
  [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
  [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
  [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
  [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
  [11] ql_positive(M::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:403
  [12] ql_positive @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined]
  [13] qx @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [inlined]
  [14] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
  [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
  [16] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
  [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [18] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
  [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [20] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  [21] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [22] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
  [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
  [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
  [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
  [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
  [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
  [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
  [31] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
  [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
  [34] top-level scope @ none:6
  [35] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
  [37] _start() @ Base ./client.jl:577
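[Editor's note] For reference, the positve=true cases in this run exercise NDTensors' qr_positive/ql_positive (linearalgebra.jl:383/403 in the traces). The log does not show their implementation; the sketch below only illustrates the standard convention such functions implement: remove the sign/phase freedom of QR by making the diagonal of the triangular factor non-negative. Function name and details are hypothetical, not NDTensors' code:

    using LinearAlgebra

    function qr_positive_sketch(A::AbstractMatrix)
        F = qr(A)
        Q, R = Matrix(F.Q), F.R
        s = sign.(diag(R))
        s[iszero.(s)] .= one(eltype(s))  # avoid zeroing columns when R[i,i] == 0
        D = Diagonal(s)
        return Q * D, D' * R             # Q*D stays orthonormal; (Q*D)*(D'*R) == Q*R
    end

    Qp, Rp = qr_positive_sketch(randn(ComplexF64, 5, 3))
    @assert all(real.(diag(Rp)) .>= 0)

The same sign-absorption idea applies to QL with the diagonal of L, which is presumably what the ql_positive path computes once the scalar-indexing problem in ql! is resolved.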
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined] [27] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined] [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined] [29] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined] [30] eval(m::Module, e::Any) @ Core ./boot.jl:489 [31] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28 [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310 [33] top-level scope @ none:6 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489 [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310 [36] _start() @ Base ./client.jl:577 Dense ql decomposition, elt=Float32, positve=false, 
Dense ql decomposition, elt=Float32, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
Dense ql decomposition, elt=Float32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
  Stacktrace:
   [1] error(s::String) @ Base ./error.jl:44
   [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
   [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
   [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
   [5] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:50 [inlined]
   [6] scalar_getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:36 [inlined]
   [7] _getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:19 [inlined]
   [8] getindex @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:17 [inlined]
   [9] ql!(A::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:449
   [10] ql @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:428 [inlined]
   [11] ql_positive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:403
   [12] ql_positive @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined]
   [13] qx @ ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:344 [inlined]
   [14] ql(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:336
   [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/wnM2t/src/linearalgebra/linearalgebra.jl:334
   [16] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:60 [inlined]
   [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
   [18] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29 [inlined]
   [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [20] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
   [21] eval(m::Module, e::Any) @ Core ./boot.jl:489
   [22] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:1
   [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
   [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
   [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
   [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
   [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
   [30] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
   [31] eval(m::Module, e::Any) @ Core ./boot.jl:489
   [32] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
   [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
   [34] top-level scope @ none:6
   [35] eval(m::Module, e::Any) @ Core ./boot.jl:489
   [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
   [37] _start() @ Base ./client.jl:577
Dense ql decomposition, elt=Float32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
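The positive=true cases error one frame deeper, inside ql_positive (linearalgebra.jl:403), which wraps the same ql call and then fixes the gauge so the diagonal of L is non-negative. A generic sketch of that sign-fixing idea for real element types; `positive_gauge` is a hypothetical helper, not NDTensors' internal implementation, and is shown on the stdlib qr factorization where the same trick applies to diag(R):

    using LinearAlgebra

    # A == Q * L is unchanged by inserting a diagonal D of ±1 between the
    # factors, since D * D == I. Choosing D from the signs of diag(L) makes
    # the diagonal non-negative -- the "positive" gauge these tests exercise.
    # (For complex eltypes the analogous fix divides out the diagonal phases.)
    function positive_gauge(Q::AbstractMatrix, L::AbstractMatrix)
        s = [L[i, i] < 0 ? -one(eltype(L)) : one(eltype(L)) for i in 1:size(L, 1)]
        D = Diagonal(s)
        return Q * D, D * L          # same product, diag(D * L) .>= 0
    end

    F = qr(rand(Float32, 4, 3))
    Qp, Rp = positive_gauge(Matrix(F.Q), F.R)
    @assert Qp * Rp ≈ Matrix(F.Q) * F.R
    @assert all(>=(0), diag(Rp))

Note this gauge fix is itself cheap and vectorizable; in these failures the scalar indexing happens earlier, inside the underlying ql! kernel at frame [9].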
Dense ql decomposition, elt=ComplexF32, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
Dense ql decomposition, elt=ComplexF32, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
Dense ql decomposition, elt=ComplexF32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
Dense ql decomposition, elt=ComplexF32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_tupletools.jl
Running /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/lib/runtests.jl
NDTensors.BackendSelection.Algorithm type , NamedTuple()
Testing Expose jl, Float32: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/src/lib/Expose/test/runtests.jl:23
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
  Stacktrace:
   [1] error(s::String) @ Base ./error.jl:44
   [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
   [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
   [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
   [5] setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:58 [inlined]
   [6] scalar_setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:42 [inlined]
   [7] _setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:26 [inlined]
   [8] setindex! @ ~/.julia/packages/GPUArrays/0F4Dn/src/host/indexing.jl:24 [inlined]
   [9] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 1}, JLArrays.JLArray{Float32, 1}}, x::Int64) @ NDTensors.Expose ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/src/functions/abstractarray.jl:9
   [10] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/test/runtests.jl:62 [inlined]
   [11] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
   [12] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/test/runtests.jl:2066
   [13] eval(m::Module, e::Any) @ Core ./boot.jl:489
   [14] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/src/lib/Expose/test/runtests.jl:1
   [15] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
   [16] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/lib/runtests.jl:12 [inlined]
   [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
   [18] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/lib/runtests.jl:2066
   [19] eval(m::Module, e::Any) @ Core ./boot.jl:489
   [20] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/lib/runtests.jl:1
   [21] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
   [22] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
   [23] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [24] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:7 [inlined]
   [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:1961 [inlined]
   [26] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:13 [inlined]
   [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.14/Test/src/Test.jl:2042 [inlined]
   [28] macro expansion @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:15 [inlined]
   [29] eval(m::Module, e::Any) @ Core ./boot.jl:489
   [30] top-level scope @ ~/.julia/packages/NDTensors/wnM2t/test/runtests.jl:28
   [31] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:310
   [32] top-level scope @ none:6
   [33] eval(m::Module, e::Any) @ Core ./boot.jl:489
   [34] exec_options(opts::Base.JLOptions) @ Base ./client.jl:310
   [35] _start() @ Base ./client.jl:577
Testing Expose jl, ComplexF32: Error During Test at /home/pkgeval/.julia/packages/NDTensors/wnM2t/src/lib/Expose/test/runtests.jl:23
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array.
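The Expose failures are the write-side twin of the ql! errors: setindex!(E::Exposed, x) at abstractarray.jl:9 performs a scalar setindex! on the wrapped JLArray, and the same GPUArraysCore guard fires. A sketch of the guard on writes, together with the vectorized forms that stay device-friendly; the array and values are illustrative:

    using JLArrays, GPUArraysCore

    GPUArraysCore.allowscalar(false)
    v = JLArray(zeros(Float32, 8))

    # v[1] = 2                            # throws: scalar setindex! is disallowed
    GPUArraysCore.@allowscalar v[1] = 2   # explicit opt-in for a single write
    v[2:3] .= 4                           # broadcast assignment writes a range on-device
    fill!(v, 0)                           # whole-array fills are also scalar-free

Broadcasting and fill! dispatch to device-wide kernels, so they pass the guard; only element-at-a-time reads and writes trip it.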
Test Summary: | Pass Error Total Time
NDTensors | 2400 62 2462 16m27.4s
  /home/pkgeval/.julia/packages/NDTensors/wnM2t/test | 2400 62 2462 15m50.3s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_blocksparse.jl | 1015 10 1025 3m46.0s
      BlockSparseTensor basic functionality | 1015 10 1025 2m35.1s
        test device: cpu, eltype: Float32 | 230 230 19.3s
        test device: cpu, eltype: Float64 | 230 230 12.9s
        test device: jl, eltype: Float32 | 230 230 38.5s
        test device: jl, eltype: Float64 | 230 230 32.5s
      BlockSparseTensor setindex!
        add block | 79 79 0.1s
      svd on cpu, eltype: Float32 | 5 5 11.4s
      svd on cpu, eltype: Float64 | 5 5 9.7s
      svd on jl, eltype: Float32 | 5 5 10.0s
        svd example 1 | 1 1 9.2s
        svd example 2 | 1 1 0.1s
        svd example 3 | 1 1 0.1s
        svd example 4 | 1 1 0.1s
        svd example 5 | 1 1 0.5s
      svd on jl, eltype: Float64 | 5 5 5.3s
        svd example 1 | 1 1 4.5s
        svd example 2 | 1 1 0.1s
        svd example 3 | 1 1 0.1s
        svd example 4 | 1 1 0.1s
        svd example 5 | 1 1 0.6s
      exp, eltype: Float32 | 3 3 9.3s
      exp, eltype: Float64 | 3 3 5.9s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_combiner.jl | 96 4 100 53.4s
      CombinerTensor basic functionality | 96 4 100 53.2s
        test device: cpu, eltype: Float64 | 36 36 24.3s
        test device: cpu, eltype: Float32 | 36 36 13.8s
        test device: jl, eltype: Float64 | 12 2 14 8.8s
          Dense * Combiner | 12 12 4.2s
          BlockSparse * Combiner | 1 1 2.9s
          BlockSparse * Combiner | 1 1 1.7s
        test device: jl, eltype: Float32 | 12 2 14 6.1s
          Dense * Combiner | 12 12 2.0s
          BlockSparse * Combiner | 1 1 2.4s
          BlockSparse * Combiner | 1 1 1.7s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_dense.jl | 545 2 547 3m10.2s
      Dense Tensors | 545 2 547 3m09.8s
        test device: cpu | 262 262 18.0s
        test device: jl | 258 2 260 39.2s
          DenseTensor basic functionality | 119 119 14.1s
          Random constructor | 8 8 0.1s
          Complex Valued Tensors | 72 72 0.1s
          Custom inds types | 21 21 0.8s
          generic contraction | 36 36 0.0s
          Contraction with size 1 block and NaN | 2 2 4 24.1s
            No permutation | 1 1 2 24.0s
            Permutation | 1 1 2 0.1s
          Contract with exotic types | 14 14 15.8s
          change backends | 6 6 0.9s
        change backends | 5 5 1m55.6s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diag.jl | 255 11 266 5m21.9s
      DiagTensor basic functionality | 240 8 248 4m46.9s
        test device: cpu | 31 31 21.9s
        test device: cpu | 31 31 19.9s
        test device: cpu | 31 31 17.5s
        test device: cpu | 31 31 16.1s
        test device: jl | 29 2 31 49.2s
        test device: jl | 29 2 31 1m00.8s
        test device: jl | 29 2 31 56.2s
        test device: jl | 29 2 31 45.0s
      DiagTensor contractions | 9 9 8.5s
      DiagTensor contractions | 6 3 9 24.3s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_diagblocksparse.jl | 28 1 29 1m04.5s
      UniformDiagBlockSparseTensor basic functionality | 10 10 0.9s
      DiagBlockSparse off-diagonal (eltype=Float32) | 2 2 10.9s
      DiagBlockSparse off-diagonal (eltype=Float64) | 2 2 7.2s
      DiagBlockSparse off-diagonal (eltype=ComplexF32) | 2 2 9.7s
      DiagBlockSparse off-diagonal (eltype=ComplexF64) | 2 2 11.4s
      DiagBlockSparse contract | 6 6 7.0s
      DiagBlockSparse contract | 1 1 4.3s
      UniformDiagBlockSparse norm | 2 2 0.0s
      DiagBlockSparse denseblocks | 2 2 0.2s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_emptynumber.jl | 28 28 0.8s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_emptystorage.jl | 24 24 2.1s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_linearalgebra.jl | 230 32 262 51.3s
      random_orthog | 3 3 1.0s
      random_unitary | 3 3 1.2s
      QX testing | 224 32 256 47.1s
        Dense qr decomposition, elt=Float64, positve=false, singular=false, device=cpu | 5 5 1.3s
        Dense qr decomposition, elt=Float64, positve=false, singular=false, device=jl | 1 1 3.4s
        Dense qr decomposition, elt=Float64, positve=false, singular=true, device=cpu | 5 5 0.3s
        Dense qr decomposition, elt=Float64, positve=false, singular=true, device=jl | 1 1 0.9s
        Dense qr decomposition, elt=Float64, positve=true, singular=false, device=cpu | 9 9 1.9s
        Dense qr decomposition, elt=Float64, positve=true, singular=false, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=Float64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=Float64, positve=true, singular=true, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=false, device=cpu | 5 5 1.8s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=false, device=jl | 1 1 2.7s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=true, device=cpu | 5 5 0.3s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=true, device=jl | 1 1 0.7s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=false, device=cpu | 9 9 1.1s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=false, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=true, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=Float32, positve=false, singular=false, device=cpu | 5 5 2.0s
        Dense qr decomposition, elt=Float32, positve=false, singular=false, device=jl | 1 1 1.5s
        Dense qr decomposition, elt=Float32, positve=false, singular=true, device=cpu | 5 5 0.4s
        Dense qr decomposition, elt=Float32, positve=false, singular=true, device=jl | 1 1 0.7s
        Dense qr decomposition, elt=Float32, positve=true, singular=false, device=cpu | 9 9 1.7s
        Dense qr decomposition, elt=Float32, positve=true, singular=false, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=Float32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=Float32, positve=true, singular=true, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=false, device=cpu | 5 5 2.8s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=false, device=jl | 1 1 2.6s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=true, device=cpu | 5 5 0.3s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=true, device=jl | 1 1 0.7s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=false, device=cpu | 9 9 1.1s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=false, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=true, device=jl | 1 1 0.2s
        Dense ql decomposition, elt=Float64, positve=false, singular=false, device=cpu | 5 5 0.8s
        Dense ql decomposition, elt=Float64, positve=false, singular=false, device=jl | 1 1 4.6s
        Dense ql decomposition, elt=Float64, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=Float64, positve=false, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float64, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float64, positve=true, singular=false, device=jl | 1 1 0.1s
        Dense ql decomposition, elt=Float64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float64, positve=true, singular=true, device=jl | 1 1 0.1s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=false, device=cpu | 5 5 0.7s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=false, device=jl | 1 1 3.6s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=true, device=jl | 1 1 0.1s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=false, device=jl | 1 1 0.1s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=true, device=jl | 1 1 0.1s
        Dense ql decomposition, elt=Float32, positve=false, singular=false, device=cpu | 5 5 0.9s
        Dense ql decomposition, elt=Float32, positve=false, singular=false, device=jl | 1 1 3.0s
        Dense ql decomposition, elt=Float32, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=Float32, positve=false, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=false, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=false, device=cpu | 5 5 0.5s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=false, device=jl | 1 1 2.7s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=false, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=true, device=jl | 1 1 0.0s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/test_tupletools.jl | 7 7 0.2s
    Test /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/lib/runtests.jl | 172 2 174 40.0s
      Test NDTensors lib AMDGPUExtensions | 2 2 0.1s
      Test NDTensors lib BackendSelection | 12 12 0.7s
      Test NDTensors lib CUDAExtensions | 2 2 0.0s
      Test NDTensors lib GPUArraysCoreExtensions | 1 1 0.0s
      Test NDTensors lib MetalExtensions | 1 1 0.0s
      Test NDTensors lib Expose | 154 2 156 39.1s
        Testing Expose cpu, Float32 | 64 64 13.1s
        Testing Expose cpu, ComplexF32 | 64 64 14.7s
        Testing Expose jl, Float32 | 13 1 14 3.7s
        Testing Expose jl, ComplexF32 | 13 1 14 2.4s
RNG of the outermost testset: Random.Xoshiro(0x14fa76b169505802, 0x95771a696efc61be, 0xecf5a9976f17f525, 0x2e46d208cc4b87ae, 0xba1bd42077cdfcb7)
ERROR: LoadError: Some tests did not pass: 2400 passed, 0 failed, 62 errored, 0 broken.
in expression starting at /home/pkgeval/.julia/packages/NDTensors/wnM2t/test/runtests.jl:3
Testing failed after 989.49s
ERROR: LoadError: Package NDTensors errored during testing
Stacktrace:
 [1] pkgerror(msg::String) @ Pkg.Types /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Types.jl:68
 [2] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, julia_args::Cmd, test_args::Cmd, test_fn::Nothing, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool) @ Pkg.Operations /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Operations.jl:3053
 [3] test @ /opt/julia/share/julia/stdlib/v1.14/Pkg/src/Operations.jl:2902 [inlined]
 [4] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, test_fn::Nothing, julia_args::Cmd, test_args::Cmd, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool, kwargs::@Kwargs{io::IOContext{IO}}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:572
 [5] kwcall(::@NamedTuple{julia_args::Cmd, io::IOContext{IO}}, ::typeof(Pkg.API.test), ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:548
 [6] test(pkgs::Vector{PackageSpec}; io::IOContext{IO}, kwargs::@Kwargs{julia_args::Cmd}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:172
 [7] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkgs::Vector{PackageSpec}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:161
 [8] test(pkgs::Vector{String}; kwargs::@Kwargs{julia_args::Cmd}) @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:160
 [9] test @ /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:160 [inlined]
 [10] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkg::String) @ Pkg.API /opt/julia/share/julia/stdlib/v1.14/Pkg/src/API.jl:159
 [11] top-level scope @ /PkgEval.jl/scripts/evaluate.jl:237
 [12] include(mod::Module, _path::String) @ Base ./Base.jl:309
 [13] exec_options(opts::Base.JLOptions) @ Base ./client.jl:344
 [14] _start() @ Base ./client.jl:577
in expression starting at /PkgEval.jl/scripts/evaluate.jl:228
PkgEval failed after 1050.02s: package fails to precompile
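For reference, the Pkg frames above correspond to the call PkgEval makes on the package; locally the same run reduces to Pkg.test, and the julia_args keyword visible in frame [6] threads extra flags to the spawned test process. A hedged reproduction sketch; the pinned version matches this report, but the flag shown is only an example of what julia_args carries, not necessarily PkgEval's exact configuration:

    import Pkg

    Pkg.add(name = "NDTensors", version = "0.4.16")
    # julia_args matches the @Kwargs{julia_args::Cmd} in the Pkg.API.test frames
    Pkg.test("NDTensors"; julia_args = `--check-bounds=yes`)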