Package evaluation of NDTensors on Julia 1.13.0-DEV.994 (f4af91db49*) started at 2025-08-17T22:12:18.516

################################################################################
# Set-up
#

Installing PkgEval dependencies (TestEnv)...
Set-up completed after 8.68s

################################################################################
# Installation
#

Installing NDTensors...
Resolving package versions...
Updating `~/.julia/environments/v1.13/Project.toml`
  [23ae76d9] + NDTensors v0.4.9
Updating `~/.julia/environments/v1.13/Manifest.toml`
  [7d9f7c33] + Accessors v0.1.42
  [79e6a3ab] + Adapt v4.3.0
  [dce04be8] + ArgCheck v2.5.0
  [4fba245c] + ArrayInterface v7.19.0
  [4c555306] + ArrayLayouts v1.11.2
  [198e06fe] + BangBang v0.4.4
  [9718e550] + Baselet v0.1.1
  [8e7c35d0] + BlockArrays v1.7.0
  [f70d9fcc] + CommonWorldInvalidations v1.0.0
  [34da2185] + Compat v4.18.0
  [a33af91c] + CompositionsBase v0.1.2
  [187b0558] + ConstructionBase v1.6.0
  [9a962f9c] + DataAPI v1.16.0
  [e2d170a0] + DataValueInterfaces v1.0.0
  [244e2a9f] + DefineSingletons v0.1.2
  [85a47980] + Dictionaries v0.4.5
  [da5c29d0] + EllipsisNotation v1.8.0
  [e2ba6199] + ExprTools v0.1.10
  [e189563c] + ExternalDocstrings v0.1.1
  [1a297f60] + FillArrays v1.13.0
  [41a02a25] + Folds v0.2.10
  [d9f16b24] + Functors v0.5.2
  [f0d1745a] + HalfIntegers v1.6.0
  [615f187c] + IfElse v0.1.1
  [313cdc1a] + Indexing v1.1.1
  [22cec73e] + InitialValues v0.3.1
  [842dd82b] + InlineStrings v1.4.4
  [3587e190] + InverseFunctions v0.1.17
  [82899510] + IteratorInterfaceExtensions v1.0.0
  [1914dd2f] + MacroTools v0.5.16
  [128add7d] + MicroCollections v0.2.0
  [23ae76d9] + NDTensors v0.4.9
  [bac558e1] + OrderedCollections v1.8.1
  [65ce6f38] + PackageExtensionCompat v1.0.2
  [aea7be01] + PrecompileTools v1.3.2
  [21216c6a] + Preferences v1.5.0
  [42d2dcc6] + Referenceables v0.1.3
  [ae029012] + Requires v1.3.1
  [efcf1570] + Setfield v1.1.2
  [699a6c99] + SimpleTraits v0.9.5
  [03a91e81] + SplitApplyCombine v1.2.3
  [171d559e] + SplittablesBase v0.1.15
  [aedffcd0] + Static v1.2.0
  [0d7ed370] + StaticArrayInterface v1.8.0
  [90137ffa] + StaticArrays v1.9.14
  [1e83bf80] + StaticArraysCore v1.4.3
  [5e0ebb24] + Strided v2.3.2
  [4db3bf67] + StridedViews v0.4.1
  [3783bdb8] + TableTraits v1.0.1
  [bd369af6] + Tables v1.12.1
  [24d252fe] + ThreadedScans v0.1.0
  [a759f4b9] + TimerOutputs v0.5.29
  [28d57a85] + Transducers v0.4.84
  [9d95972d] + TupleTools v1.6.0
⌅ [7e5a90cf] + TypeParameterAccessors v0.3.10
  [409d34a3] + VectorInterface v0.5.0
  [56f22d72] + Artifacts v1.11.0
  [2a0f44e3] + Base64 v1.11.0
  [ade2ca70] + Dates v1.11.0
  [8ba89e20] + Distributed v1.11.0
  [9fa8497b] + Future v1.11.0
  [b77e0a4c] + InteractiveUtils v1.11.0
  [ac6e5ff7] + JuliaSyntaxHighlighting v1.12.0
  [8f399da3] + Libdl v1.11.0
  [37e2e46d] + LinearAlgebra v1.13.0
  [56ddb016] + Logging v1.11.0
  [d6f4376e] + Markdown v1.11.0
  [de0858da] + Printf v1.11.0
  [9a3f8284] + Random v1.11.0
  [ea8e919c] + SHA v0.7.0
  [9e88b42a] + Serialization v1.11.0
  [6462fe0b] + Sockets v1.11.0
  [2f01184e] + SparseArrays v1.13.0
  [f489334b] + StyledStrings v1.11.0
  [fa267f1f] + TOML v1.0.3
  [8dfed614] + Test v1.11.0
  [cf7118a7] + UUIDs v1.11.0
  [4ec0a83e] + Unicode v1.11.0
  [e66e0078] + CompilerSupportLibraries_jll v1.3.0+1
  [4536629a] + OpenBLAS_jll v0.3.29+0
  [bea87d4a] + SuiteSparse_jll v7.10.1+0
  [8e850b90] + libblastrampoline_jll v5.13.1+0
Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading.
To see why use `status --outdated -m`
Installation completed after 3.7s

################################################################################
# Precompilation
#

Precompiling PkgEval dependencies...
Precompiling package dependencies...
ERROR: LoadError: The following 4 direct dependencies failed to precompile:

Octavian

Failed to precompile Octavian [6fd5a793-0b7e-452c-907f-f8bfe9c57db4] to "/home/pkgeval/.julia/compiled/v1.13/Octavian/jl_gJJ2RF" (ProcessExited(1)).
ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
Suggestion: check for spelling errors or missing imports.
Stacktrace:
  [1] getproperty @ ./Base_compiler.jl:50 [inlined]
  [2] recompile_invalidations(__module__::Module, expr::Any) @ PrecompileTools ~/.julia/packages/PrecompileTools/Z8SWe/src/invalidations.jl:17
  [3] top-level scope @ ~/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:19
  [4] include(mod::Module, _path::String) @ Base ./Base.jl:308
  [5] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
  [6] top-level scope @ stdin:5
  [7] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [8] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
  [9] include_string @ ./loading.jl:2873 [inlined]
 [10] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [11] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_aGVAQ2" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] _eval_import(imported::Bool, to::Module, from::Nothing, paths::Expr) @ Base ./module.jl:111
 [20] top-level scope @ ~/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:6
 [21] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [22] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
 [23] top-level scope @ stdin:5
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [26] include_string @ ./loading.jl:2873 [inlined]
 [27] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [28] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile VectorizationBase [3d5dd08c-fd9d-11e8-17fa-ed2836048c2f] to "/home/pkgeval/.julia/compiled/v1.13/VectorizationBase/jl_hS7Cyj" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
 [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
 [21] top-level scope @ ~/.julia/packages/Octavian/4f4xi/src/Octavian.jl:3
 [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:3017
 [24] top-level scope @ stdin:5
 [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [27] include_string @ ./loading.jl:2873 [inlined]
 [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [29] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/Octavian/4f4xi/src/Octavian.jl:1
in expression starting at stdin:5

EllipsisNotation

Failed to precompile EllipsisNotation [da5c29d0-fa7d-589e-88eb-ea29b0a81949] to "/home/pkgeval/.julia/compiled/v1.13/EllipsisNotation/jl_5k1Ppc" (ProcessExited(1)).
ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
Suggestion: check for spelling errors or missing imports.
Stacktrace:
  [1] getproperty @ ./Base_compiler.jl:50 [inlined]
  [2] recompile_invalidations(__module__::Module, expr::Any) @ PrecompileTools ~/.julia/packages/PrecompileTools/Z8SWe/src/invalidations.jl:17
  [3] top-level scope @ ~/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:19
  [4] include(mod::Module, _path::String) @ Base ./Base.jl:308
  [5] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
  [6] top-level scope @ stdin:5
  [7] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [8] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
  [9] include_string @ ./loading.jl:2873 [inlined]
 [10] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [11] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_KhFxD2" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
 [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
 [21] top-level scope @ ~/.julia/packages/EllipsisNotation/duIu5/src/EllipsisNotation.jl:5
 [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:3017
 [24] top-level scope @ stdin:5
 [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [27] include_string @ ./loading.jl:2873 [inlined]
 [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [29] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/EllipsisNotation/duIu5/src/EllipsisNotation.jl:3
in expression starting at stdin:5

NDTensorsOctavianExt

Failed to precompile NDTensorsOctavianExt [8476096c-a533-58b7-8ba4-cf175432e973] to "/home/pkgeval/.julia/compiled/v1.13/NDTensorsOctavianExt/jl_HWXkmY" (ProcessExited(1)).
ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
Suggestion: check for spelling errors or missing imports.
Stacktrace:
  [1] getproperty @ ./Base_compiler.jl:50 [inlined]
  [2] recompile_invalidations(__module__::Module, expr::Any) @ PrecompileTools ~/.julia/packages/PrecompileTools/Z8SWe/src/invalidations.jl:17
  [3] top-level scope @ ~/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:19
  [4] include(mod::Module, _path::String) @ Base ./Base.jl:308
  [5] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
  [6] top-level scope @ stdin:5
  [7] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [8] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
  [9] include_string @ ./loading.jl:2873 [inlined]
 [10] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [11] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_RxTLeP" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] _eval_import(imported::Bool, to::Module, from::Nothing, paths::Expr) @ Base ./module.jl:111
 [20] top-level scope @ ~/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:6
 [21] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [22] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
 [23] top-level scope @ stdin:5
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [26] include_string @ ./loading.jl:2873 [inlined]
 [27] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [28] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile VectorizationBase [3d5dd08c-fd9d-11e8-17fa-ed2836048c2f] to "/home/pkgeval/.julia/compiled/v1.13/VectorizationBase/jl_LYcRTg" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
 [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
 [21] top-level scope @ ~/.julia/packages/Octavian/4f4xi/src/Octavian.jl:3
 [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
 [24] top-level scope @ stdin:5
 [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [27] include_string @ ./loading.jl:2873 [inlined]
 [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [29] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/Octavian/4f4xi/src/Octavian.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile Octavian [6fd5a793-0b7e-452c-907f-f8bfe9c57db4] to "/home/pkgeval/.julia/compiled/v1.13/Octavian/jl_fJmht5" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
 [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/ext/NDTensorsOctavianExt/NDTensorsOctavianExt.jl:4
 [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:3017
 [24] top-level scope @ stdin:5
 [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [27] include_string @ ./loading.jl:2873 [inlined]
 [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [29] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/NDTensors/Lb78J/ext/NDTensorsOctavianExt/NDTensorsOctavianExt.jl:1
in expression starting at stdin:5

ForwardDiffExt

Failed to precompile ForwardDiffExt [a944ce4c-21c9-5cba-ad06-a0b2649de6d3] to "/home/pkgeval/.julia/compiled/v1.13/ForwardDiffExt/jl_xM68lD" (ProcessExited(1)).
ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
Suggestion: check for spelling errors or missing imports.
Stacktrace:
  [1] getproperty @ ./Base_compiler.jl:50 [inlined]
  [2] recompile_invalidations(__module__::Module, expr::Any) @ PrecompileTools ~/.julia/packages/PrecompileTools/Z8SWe/src/invalidations.jl:17
  [3] top-level scope @ ~/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:19
  [4] include(mod::Module, _path::String) @ Base ./Base.jl:308
  [5] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
  [6] top-level scope @ stdin:5
  [7] eval(m::Module, e::Any) @ Core ./boot.jl:489
  [8] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
  [9] include_string @ ./loading.jl:2873 [inlined]
 [10] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [11] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_QUWMhi" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] _eval_import(imported::Bool, to::Module, from::Nothing, paths::Expr) @ Base ./module.jl:111
 [20] top-level scope @ ~/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:6
 [21] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [22] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
 [23] top-level scope @ stdin:5
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [26] include_string @ ./loading.jl:2873 [inlined]
 [27] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [28] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile VectorizationBase [3d5dd08c-fd9d-11e8-17fa-ed2836048c2f] to "/home/pkgeval/.julia/compiled/v1.13/VectorizationBase/jl_GWguXD" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
 [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
 [21] top-level scope @ ~/.julia/packages/Octavian/4f4xi/src/Octavian.jl:3
 [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
 [24] top-level scope @ stdin:5
 [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [27] include_string @ ./loading.jl:2873 [inlined]
 [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [29] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/Octavian/4f4xi/src/Octavian.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile Octavian [6fd5a793-0b7e-452c-907f-f8bfe9c57db4] to "/home/pkgeval/.julia/compiled/v1.13/Octavian/jl_4wtpv9" (ProcessExited(1)).
Stacktrace:
  [1] error(s::String) @ Base ./error.jl:44
  [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
  [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
  [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
  [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
  [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
  [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
  [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
  [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
 [20] _eval_import(::Bool, ::Module, ::Expr, ::Expr, ::Vararg{Expr}) @ Base ./module.jl:101
 [21] top-level scope @ ~/.julia/packages/Octavian/4f4xi/ext/ForwardDiffExt.jl:5
 [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:3017
 [24] top-level scope @ stdin:5
 [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [27] include_string @ ./loading.jl:2873 [inlined]
 [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [29] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/Octavian/4f4xi/ext/ForwardDiffExt.jl:1
in expression starting at stdin:5
in expression starting at /PkgEval.jl/scripts/precompile.jl:37
Precompilation failed after 210.47s
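Note: every failure above bottoms out in the same first error. `PrecompileTools.recompile_invalidations` touches the internal `Base.StaticData` binding (via `getproperty` in Base_compiler.jl), and that binding does not exist under this name on Julia 1.13.0-DEV, so StaticArrayInterface and everything downstream of it (VectorizationBase, Octavian, EllipsisNotation, and the NDTensors extensions) fails to precompile in a cascade. A minimal sketch, assuming nothing beyond Base, of the kind of guard that tolerates a missing internal; this only illustrates the failure mode and is not the actual PrecompileTools code:

    # Internal Base modules can disappear or move between Julia versions,
    # so probe for the binding instead of accessing it unconditionally.
    if isdefined(Base, :StaticData)
        @info "Base.StaticData exists on this build; the internal API is reachable"
    else
        # On Julia 1.13.0-DEV.994 this branch is taken, matching the
        # UndefVarError reported throughout the precompilation log.
        @warn "Base.StaticData is not defined; skipping work that depends on it"
    end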
################################################################################
# Testing
#

Testing NDTensors
Status `/tmp/jl_sfU4zt/Project.toml`
  [79e6a3ab] Adapt v4.3.0
  [4c555306] ArrayLayouts v1.11.2
  [8e7c35d0] BlockArrays v1.7.0
  [861a8166] Combinatorics v1.0.3
  [85a47980] Dictionaries v0.4.5
  [da5c29d0] EllipsisNotation v1.8.0
  [1a297f60] FillArrays v1.13.0
  [46192b85] GPUArraysCore v0.2.0
  [27aeb0d3] JLArrays v0.2.0
  [dbb5928d] MappedArrays v0.4.2
  [23ae76d9] NDTensors v0.4.9
  [6fd5a793] Octavian v0.3.29
  [1bc83da4] SafeTestsets v0.1.0
  [860ef19b] StableRNGs v1.0.3
  [4db3bf67] StridedViews v0.4.1
  [6aa20fa7] TensorOperations v5.3.0
⌅ [7e5a90cf] TypeParameterAccessors v0.3.10
  [e88e6eb3] Zygote v0.7.10
  [37e2e46d] LinearAlgebra v1.13.0
  [44cfe95a] Pkg v1.13.0
  [9a3f8284] Random v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [8dfed614] Test v1.11.0
Status `/tmp/jl_sfU4zt/Manifest.toml`
  [621f4979] AbstractFFTs v1.5.0
  [7d9f7c33] Accessors v0.1.42
  [79e6a3ab] Adapt v4.3.0
  [dce04be8] ArgCheck v2.5.0
  [4fba245c] ArrayInterface v7.19.0
  [4c555306] ArrayLayouts v1.11.2
  [a9b6321e] Atomix v1.1.2
  [198e06fe] BangBang v0.4.4
  [9718e550] Baselet v0.1.1
  [62783981] BitTwiddlingConvenienceFunctions v0.1.6
  [8e7c35d0] BlockArrays v1.7.0
  [fa961155] CEnum v0.5.0
  [2a0fbf3d] CPUSummary v0.2.7
  [082447d4] ChainRules v1.72.5
  [d360d2e6] ChainRulesCore v1.26.0
  [fb6a15b2] CloseOpenIntervals v0.1.13
  [861a8166] Combinatorics v1.0.3
  [bbf7d656] CommonSubexpressions v0.3.1
  [f70d9fcc] CommonWorldInvalidations v1.0.0
  [34da2185] Compat v4.18.0
  [a33af91c] CompositionsBase v0.1.2
  [187b0558] ConstructionBase v1.6.0
  [adafc99b] CpuId v0.3.1
  [9a962f9c] DataAPI v1.16.0
  [e2d170a0] DataValueInterfaces v1.0.0
  [244e2a9f] DefineSingletons v0.1.2
  [85a47980] Dictionaries v0.4.5
  [163ba53b] DiffResults v1.1.0
  [b552c78f] DiffRules v1.15.1
  [ffbed154] DocStringExtensions v0.9.5
  [da5c29d0] EllipsisNotation v1.8.0
  [e2ba6199] ExprTools v0.1.10
  [e189563c] ExternalDocstrings v0.1.1
  [1a297f60] FillArrays v1.13.0
  [41a02a25] Folds v0.2.10
  [f6369f11] ForwardDiff v1.0.1
  [d9f16b24] Functors v0.5.2
  [0c68f7d7] GPUArrays v11.2.3
  [46192b85] GPUArraysCore v0.2.0
  [f0d1745a] HalfIntegers v1.6.0
  [076d061b] HashArrayMappedTries v0.2.0
  [3e5b6fbb] HostCPUFeatures v0.1.17
  [7869d1d1] IRTools v0.4.15
  [615f187c] IfElse v0.1.1
  [313cdc1a] Indexing v1.1.1
  [22cec73e] InitialValues v0.3.1
  [842dd82b] InlineStrings v1.4.4
  [3587e190] InverseFunctions v0.1.17
  [92d709cd] IrrationalConstants v0.2.4
  [82899510] IteratorInterfaceExtensions v1.0.0
  [27aeb0d3] JLArrays v0.2.0
  [692b3bcd] JLLWrappers v1.7.1
  [63c18a36] KernelAbstractions v0.9.38
  [929cbde3] LLVM v9.4.2
  [8ac3fa9e] LRUCache v1.6.2
  [10f19ff3] LayoutPointers v0.1.17
  [2ab3a3ac] LogExpFunctions v0.3.29
  [bdcacae8] LoopVectorization v0.12.172
  [1914dd2f] MacroTools v0.5.16
  [d125e4d3] ManualMemory v0.1.8
  [dbb5928d] MappedArrays v0.4.2
  [128add7d] MicroCollections v0.2.0
  [23ae76d9] NDTensors v0.4.9
  [77ba4419] NaNMath v1.1.3
  [6fd5a793] Octavian v0.3.29
  [6fe1bfb0] OffsetArrays v1.17.0
  [bac558e1] OrderedCollections v1.8.1
  [65ce6f38] PackageExtensionCompat v1.0.2
  [1d0040c9] PolyesterWeave v0.2.2
  [aea7be01] PrecompileTools v1.3.2
  [21216c6a] Preferences v1.5.0
  [43287f4e] PtrArrays v1.3.0
  [c1ae055f] RealDot v0.1.0
  [189a3867] Reexport v1.2.2
  [42d2dcc6] Referenceables v0.1.3
  [ae029012] Requires v1.3.1
  [94e857df] SIMDTypes v0.1.0
  [476501e8] SLEEFPirates v0.6.43
  [1bc83da4] SafeTestsets v0.1.0
  [7e506255] ScopedValues v1.4.0
  [efcf1570] Setfield v1.1.2
  [699a6c99] SimpleTraits v0.9.5
  [dc90abb0] SparseInverseSubset v0.1.2
  [276daf66] SpecialFunctions v2.5.1
  [03a91e81] SplitApplyCombine v1.2.3
  [171d559e] SplittablesBase v0.1.15
  [860ef19b] StableRNGs v1.0.3
  [aedffcd0] Static v1.2.0
  [0d7ed370] StaticArrayInterface v1.8.0
  [90137ffa] StaticArrays v1.9.14
  [1e83bf80] StaticArraysCore v1.4.3
  [10745b16] Statistics v1.11.1
  [5e0ebb24] Strided v2.3.2
  [4db3bf67] StridedViews v0.4.1
  [09ab397b] StructArrays v0.7.1
  [3783bdb8] TableTraits v1.0.1
  [bd369af6] Tables v1.12.1
  [6aa20fa7] TensorOperations v5.3.0
  [24d252fe] ThreadedScans v0.1.0
  [8290d209] ThreadingUtilities v0.5.5
  [a759f4b9] TimerOutputs v0.5.29
  [28d57a85] Transducers v0.4.84
  [9d95972d] TupleTools v1.6.0
⌅ [7e5a90cf] TypeParameterAccessors v0.3.10
  [3a884ed6] UnPack v1.0.2
  [013be700] UnsafeAtomics v0.3.0
  [409d34a3] VectorInterface v0.5.0
  [3d5dd08c] VectorizationBase v0.21.71
  [e88e6eb3] Zygote v0.7.10
  [700de1a5] ZygoteRules v0.2.7
  [dad2f222] LLVMExtra_jll v0.0.37+2
  [efe28fd5] OpenSpecFun_jll v0.5.6+0
  [0dad84c5] ArgTools v1.1.2
  [56f22d72] Artifacts v1.11.0
  [2a0f44e3] Base64 v1.11.0
  [ade2ca70] Dates v1.11.0
  [8ba89e20] Distributed v1.11.0
  [f43a241f] Downloads v1.7.0
  [7b1f6079] FileWatching v1.11.0
  [9fa8497b] Future v1.11.0
  [b77e0a4c] InteractiveUtils v1.11.0
  [ac6e5ff7] JuliaSyntaxHighlighting v1.12.0
  [4af54fe1] LazyArtifacts v1.11.0
  [b27032c2] LibCURL v0.6.4
  [76f85450] LibGit2 v1.11.0
  [8f399da3] Libdl v1.11.0
  [37e2e46d] LinearAlgebra v1.13.0
  [56ddb016] Logging v1.11.0
  [d6f4376e] Markdown v1.11.0
  [ca575930] NetworkOptions v1.3.0
  [44cfe95a] Pkg v1.13.0
  [de0858da] Printf v1.11.0
  [9a3f8284] Random v1.11.0
  [ea8e919c] SHA v0.7.0
  [9e88b42a] Serialization v1.11.0
  [6462fe0b] Sockets v1.11.0
  [2f01184e] SparseArrays v1.13.0
  [f489334b] StyledStrings v1.11.0
  [4607b0f0] SuiteSparse
  [fa267f1f] TOML v1.0.3
  [a4e569a6] Tar v1.10.0
  [8dfed614] Test v1.11.0
  [cf7118a7] UUIDs v1.11.0
  [4ec0a83e] Unicode v1.11.0
  [e66e0078] CompilerSupportLibraries_jll v1.3.0+1
  [deac9b47] LibCURL_jll v8.15.0+1
  [e37daf67] LibGit2_jll v1.9.1+0
  [29816b5a] LibSSH2_jll v1.11.3+1
  [14a3606d] MozillaCACerts_jll v2025.8.12
  [4536629a] OpenBLAS_jll v0.3.29+0
  [05823500] OpenLibm_jll v0.8.5+0
  [458c3c95] OpenSSL_jll v3.5.2+0
  [efcefdf7] PCRE2_jll v10.45.0+0
  [bea87d4a] SuiteSparse_jll v7.10.1+0
  [83775a58] Zlib_jll v1.3.1+2
  [3161d3a3] Zstd_jll v1.5.7+1
  [8e850b90] libblastrampoline_jll v5.13.1+0
  [8e850ede] nghttp2_jll v1.65.0+0
  [3f19e933] p7zip_jll v17.5.0+2
Info Packages marked with ⌅ have new versions available but compatibility constraints restrict them from upgrading.
Testing Running tests...
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl
Precompiling packages...
  ✗ Static
  4596.1 ms ✓ BlockArrays
  ✗ StaticArrayInterface
  2922.4 ms ✓ BlockArrays → BlockArraysAdaptExt
  6214.7 ms ✓ Transducers → TransducersBlockArraysExt
  ✗ StaticArrayInterface → StaticArrayInterfaceStaticArraysExt
  ✗ EllipsisNotation
  4808.4 ms ✓ NDTensors → NDTensorsGPUArraysCoreExt
4 dependencies successfully precompiled in 40 seconds. 92 already precompiled.
┌ Error: Error during loading of extension NDTensorsGPUArraysCoreExt of NDTensors, use `Base.retry_load_extensions()` to retry.
│ exception =
│  1-element ExceptionStack:
│  The following 1 direct dependency failed to precompile:
│
│  EllipsisNotation
│
│  Failed to precompile EllipsisNotation [da5c29d0-fa7d-589e-88eb-ea29b0a81949] to "/home/pkgeval/.julia/compiled/v1.13/EllipsisNotation/jl_nWd5Vf" (ProcessExited(1)).
│  ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
│  Suggestion: check for spelling errors or missing imports.
│  Stacktrace:
│    [1] getproperty @ ./Base_compiler.jl:50 [inlined]
│    [2] recompile_invalidations(__module__::Module, expr::Any) @ PrecompileTools ~/.julia/packages/PrecompileTools/Z8SWe/src/invalidations.jl:17
│    [3] top-level scope @ ~/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:19
│    [4] include(mod::Module, _path::String) @ Base ./Base.jl:308
│    [5] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
│    [6] top-level scope @ stdin:5
│    [7] eval(m::Module, e::Any) @ Core ./boot.jl:489
│    [8] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
│    [9] include_string @ ./loading.jl:2873 [inlined]
│   [10] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
│   [11] _start() @ Base ./client.jl:563
│  in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
│  in expression starting at stdin:5
│  ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_GutJRs" (ProcessExited(1)).
│  Stacktrace:
│    [1] error(s::String) @ Base ./error.jl:44
│    [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
│    [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
│    [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
│    [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
│    [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
│    [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
│    [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
│    [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
│   [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
│   [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
│   [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
│   [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
│   [14] macro expansion @ ./loading.jl:2412 [inlined]
│   [15] macro expansion @ ./lock.jl:376 [inlined]
│   [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
│   [17] require @ ./loading.jl:2352 [inlined]
│   [18] eval_import_path @ ./module.jl:36 [inlined]
│   [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
│   [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
│   [21] top-level scope @ ~/.julia/packages/EllipsisNotation/duIu5/src/EllipsisNotation.jl:5
│   [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
│   [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:3017
│   [24] top-level scope @ stdin:5
│   [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
│   [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
│   [27] include_string @ ./loading.jl:2873 [inlined]
│   [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
│   [29] _start() @ Base ./client.jl:563
│  in expression starting at /home/pkgeval/.julia/packages/EllipsisNotation/duIu5/src/EllipsisNotation.jl:3
│  in expression starting at stdin:
└ @ Base loading.jl:1591
Precompiling packages...
Info Given JLArrays was explicitly requested, output will be shown live
WARNING: Constructor for type "BroadcastStyle" was extended in `JLArrays` without explicit qualification or import.
  NOTE: Assumed "BroadcastStyle" refers to `Broadcast.BroadcastStyle`. This behavior is deprecated and may differ in future versions.
  NOTE: This behavior may have differed in Julia versions prior to 1.12.
  Hint: If you intended to create a new generic function of the same name, use `function BroadcastStyle end`.
  Hint: To silence the warning, qualify `BroadcastStyle` as `Broadcast.BroadcastStyle` in the method signature or explicitly `import Broadcast: BroadcastStyle`.
  8733.7 ms ✓ JLArrays
1 dependency successfully precompiled in 9 seconds. 54 already precompiled.
1 dependency had output during precompilation:
┌ JLArrays
│  [Output was shown above]
└
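Note: the warning above is Julia 1.12+ objecting to a method being added to `Broadcast.BroadcastStyle` without qualification or an explicit import. A minimal sketch of the qualified pattern the hint asks for, using a hypothetical `MyArray` wrapper type that is not part of JLArrays:

    # A toy array type standing in for JLArray; only here to carry the method.
    struct MyArray{T,N} <: AbstractArray{T,N}
        data::Array{T,N}
    end
    Base.size(A::MyArray) = size(A.data)
    Base.getindex(A::MyArray, i::Int...) = A.data[i...]

    # Qualifying the function being extended silences the warning; the
    # alternative is `import Base.Broadcast: BroadcastStyle` before defining it.
    Broadcast.BroadcastStyle(::Type{<:MyArray}) = Broadcast.ArrayStyle{MyArray}()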
Precompiling packages...
  6098.3 ms ✓ TypeParameterAccessors → TypeParameterAccessorsJLArraysExt
1 dependency successfully precompiled in 6 seconds. 57 already precompiled.
Precompiling packages...
  ✗ Static
  ✗ StaticArrayInterface
  ✗ StaticArrayInterface → StaticArrayInterfaceStaticArraysExt
  ✗ EllipsisNotation
  9098.3 ms ✓ NDTensors → NDTensorsJLArraysExt
1 dependency successfully precompiled in 29 seconds. 134 already precompiled.
┌ Error: Error during loading of extension NDTensorsJLArraysExt of NDTensors, use `Base.retry_load_extensions()` to retry.
│ exception =
│  1-element ExceptionStack:
│  The following 1 direct dependency failed to precompile:
│
│  EllipsisNotation
│
│  Failed to precompile EllipsisNotation [da5c29d0-fa7d-589e-88eb-ea29b0a81949] to "/home/pkgeval/.julia/compiled/v1.13/EllipsisNotation/jl_n7fiDt" (ProcessExited(1)).
│  ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
│  Suggestion: check for spelling errors or missing imports.
│  Stacktrace:
│    [1] getproperty @ ./Base_compiler.jl:50 [inlined]
│    [2] recompile_invalidations(__module__::Module, expr::Any) @ PrecompileTools ~/.julia/packages/PrecompileTools/Z8SWe/src/invalidations.jl:17
│    [3] top-level scope @ ~/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:19
│    [4] include(mod::Module, _path::String) @ Base ./Base.jl:308
│    [5] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
│    [6] top-level scope @ stdin:5
│    [7] eval(m::Module, e::Any) @ Core ./boot.jl:489
│    [8] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
│    [9] include_string @ ./loading.jl:2873 [inlined]
│   [10] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
│   [11] _start() @ Base ./client.jl:563
│  in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
│  in expression starting at stdin:5
│  ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_p04UYY" (ProcessExited(1)).
│  Stacktrace:
│    [1] error(s::String) @ Base ./error.jl:44
│    [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
│    [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
│    [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
│    [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
│    [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
│    [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
│    [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
│    [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
│   [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
│   [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
│   [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
│   [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
│   [14] macro expansion @ ./loading.jl:2412 [inlined]
│   [15] macro expansion @ ./lock.jl:376 [inlined]
│   [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
│   [17] require @ ./loading.jl:2352 [inlined]
│   [18] eval_import_path @ ./module.jl:36 [inlined]
│   [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
│   [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
│   [21] top-level scope @ ~/.julia/packages/EllipsisNotation/duIu5/src/EllipsisNotation.jl:5
│   [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
│   [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:3017
│   [24] top-level scope @ stdin:5
│   [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
│   [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
│   [27] include_string @ ./loading.jl:2873 [inlined]
│   [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
│   [29] _start() @ Base ./client.jl:563
│  in expression starting at /home/pkgeval/.julia/packages/EllipsisNotation/duIu5/src/EllipsisNotation.jl:3
│  in expression starting at stdin:
└ @ Base loading.jl:1591
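Note: both extension-load errors carry the same recovery hint. Once the underlying precompilation problem is fixed in the running session, the retry can be issued with the Base function the error message itself names:

    # Re-attempt loading package extensions that failed earlier,
    # without restarting Julia.
    Base.retry_load_extensions()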
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:113 [inlined] [7] diag(ETensor::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 1}, NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/blocksparsetensor.jl:368 [8] diag(tensor::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensor/tensor.jl:418 [9] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [10] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [11] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:32 [inlined] [12] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [13] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:57 [inlined] [14] eval(m::Module, e::Any) @ Core ./boot.jl:489 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [16] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [17] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [24] eval(m::Module, e::Any) @ Core ./boot.jl:489 [25] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [26] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [27] top-level scope @ none:6 [28] eval(m::Module, e::Any) @ Core ./boot.jl:489 [29] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [30] _start() @ Base ./client.jl:563 test device: jl, eltype: Float64: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:32 Got exception outside of a @test Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:58 [inlined] [6] setindex! 
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:113 [inlined] [7] diag(ETensor::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/blocksparsetensor.jl:368 [8] diag(tensor::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensor/tensor.jl:418 [9] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [10] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [11] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:32 [inlined] [12] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [13] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:57 [inlined] [14] eval(m::Module, e::Any) @ Core ./boot.jl:489 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [16] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [17] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [24] eval(m::Module, e::Any) @ Core ./boot.jl:489 [25] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [26] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [27] top-level scope @ none:6 [28] eval(m::Module, e::Any) @ Core ./boot.jl:489 [29] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [30] _start() @ Base ./client.jl:563 svd example 1: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:293 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
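The two `diag` failures above and the svd failures whose traces follow share one root cause: GPUArraysCore forbids element-wise (scalar) indexing of GPU arrays unless it is explicitly enabled. In the `diag` case, frames [5]-[8] show `NDTensors.diag` filling its dense result through `setindex!` on the underlying `JLArrays.JLArray`. A minimal sketch of the restriction itself, assuming only JLArrays and GPUArraysCore (no NDTensors involved):

    using JLArrays, GPUArraysCore

    a = jl(zeros(Float32, 4))              # JLArray: the CPU-backed reference GPU array type
    # a[1] = 1f0                           # would throw: scalar setindex! is disallowed
    GPUArraysCore.@allowscalar a[1] = 1f0  # explicit opt-in; legal, but element-by-element

`@allowscalar` is the diagnostic escape hatch the error message itself suggests, not a fix; an efficient GPU `diag` would need a vectorized code path instead of an element loop.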
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] __matmul2x2_elements(tA::Char, A::JLArrays.JLArray{Float32, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1205 [10] __matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1234 [inlined] [11] _matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1199 [inlined] [12] matmul2x2or3x3_nonzeroalpha!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:490 [13] gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [inlined] [14] _syrk_herk_gemm_wrapper!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool, ::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:519 [15] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool, val::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511 [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [18] mul! 
@ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:270 [inlined] [19] * @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:141 [inlined] [20] svd_recursive(M::JLArrays.JLArray{Float32, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:39 [21] svd_recursive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [22] svd(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [24] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [25] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:294 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:296 [inlined] [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [49] _start() @ Base ./client.jl:563 svd example 2: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:301 Got exception outside of a @test Scalar indexing is 
disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] __matmul2x2_elements(tA::Char, A::JLArrays.JLArray{Float32, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1205 [10] __matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1234 [inlined] [11] _matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1199 [inlined] [12] matmul2x2or3x3_nonzeroalpha!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:490 [13] gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [inlined] [14] _syrk_herk_gemm_wrapper!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool, ::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:519 [15] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool, val::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511 [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [18] mul! 
@ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:270 [inlined] [19] * @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:141 [inlined] [20] svd_recursive(M::JLArrays.JLArray{Float32, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:39 [21] svd_recursive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [22] svd(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [24] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [25] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:302 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:304 [inlined] [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [49] _start() @ Base ./client.jl:563 svd example 3: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:309 Got exception outside of a @test Scalar indexing is 
disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] __matmul2x2_elements(tA::Char, A::JLArrays.JLArray{Float32, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1205 [10] __matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1234 [inlined] [11] _matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1199 [inlined] [12] matmul2x2or3x3_nonzeroalpha!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:490 [13] gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [inlined] [14] _syrk_herk_gemm_wrapper!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool, ::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:519 [15] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float32, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float32, 2}, B::JLArrays.JLArray{Float32, 2}, α::Bool, β::Bool, val::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511 [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [18] mul! 
@ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:270 [inlined] [19] * @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:141 [inlined] [20] svd_recursive(M::JLArrays.JLArray{Float32, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:39 [21] svd_recursive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [22] svd(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [24] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [25] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:310 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:312 [inlined] [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [49] _start() @ Base ./client.jl:563 svd example 4: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:317 Got exception outside of a @test Scalar indexing is 
disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] chkuplofinite(A::JLArrays.JLArray{Float32, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:117 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float32, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:5393 [11] #eigen!#253 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:24 [inlined] [12] eigen! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:18 [inlined] [13] #_eigen#255 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:61 [inlined] [14] _eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:59 [inlined] [15] #eigen#254 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:55 [inlined] [16] eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:54 [inlined] [17] eigen @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined] [18] svd_recursive(M::JLArrays.JLArray{Float32, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:40 [19] svd_recursive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [20] svd(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [21] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [22] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [23] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}) 
@ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [24] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:318 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:320 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [35] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [36] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [41] eval(m::Module, e::Any) @ Core ./boot.jl:489 [42] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [43] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [44] top-level scope @ none:6 [45] eval(m::Module, e::Any) @ Core ./boot.jl:489 [46] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [47] _start() @ Base ./client.jl:563 svd example 5: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:325 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
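The svd failures split into two distinct failure points inside `svd_recursive`. Examples 1-3 die in the matrix product at svd.jl:39: on this Julia build, a product with a 2x2 result is routed through `matmul2x2or3x3_nonzeroalpha!`, whose helper `__matmul2x2_elements` reads entries with scalar `getindex` (frames [9]-[19] of the traces above). Examples 4-5 get past the product and die one line later in `eigen` (svd.jl:40), where the argument check `chkuplofinite` of `LAPACK.syevr!` also indexes the matrix element-by-element, as the trace below shows. The small-matrix pitfall in isolation, as a sketch assuming JLArrays:

    using JLArrays, LinearAlgebra, GPUArraysCore

    M = jl(rand(Float32, 2, 2))
    # M * M'                            # 2x2 result takes the element-wise fast path -> error
    GPUArraysCore.@allowscalar M * M'   # runs, but via slow scalar reads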
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] chkuplofinite(A::JLArrays.JLArray{Float32, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:117 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float32, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:5393 [11] #eigen!#253 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:24 [inlined] [12] eigen! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:18 [inlined] [13] #_eigen#255 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:61 [inlined] [14] _eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:59 [inlined] [15] #eigen#254 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:55 [inlined] [16] eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:54 [inlined] [17] eigen @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined] [18] svd_recursive(M::LinearAlgebra.Transpose{Float32, JLArrays.JLArray{Float32, 2}}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:40 [19] svd_recursive(M::LinearAlgebra.Transpose{Float32, JLArrays.JLArray{Float32, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [20] svd_recursive(M::JLArrays.JLArray{Float32, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:32 [21] svd_recursive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [22] svd(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [24] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [25] svd(T::NDTensors.BlockSparseTensor{Float32, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 2}}) @ NDTensors 
~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:326 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:328 [inlined] [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [49] _start() @ Base ./client.jl:563 svd example 1: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:293 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
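From here the five svd failures repeat with `Float64` in place of `Float32`; only the element type and the test line numbers change, so the problem is independent of precision. A backend-agnostic way around both failure points is to factorize on the host and move the factors back. A sketch with a hypothetical helper (`svd_on_host` is illustration only, not NDTensors API):

    using JLArrays, LinearAlgebra

    # Hypothetical host fallback for the SVD of a GPU-resident matrix.
    function svd_on_host(M::JLArray)
        F = svd(Array(M))                  # copy to the CPU and factorize with LAPACK
        return jl(F.U), jl(F.S), jl(F.Vt)  # copy the factors back to the device
    end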
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] __matmul2x2_elements(tA::Char, A::JLArrays.JLArray{Float64, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1205 [10] __matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1234 [inlined] [11] _matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1199 [inlined] [12] matmul2x2or3x3_nonzeroalpha!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:490 [13] gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [inlined] [14] _syrk_herk_gemm_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool, ::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:519 [15] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool, val::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511 [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [18] mul! 
@ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:270 [inlined] [19] * @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:141 [inlined] [20] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:39 [21] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [22] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [24] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [25] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:294 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:296 [inlined] [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [49] _start() @ Base ./client.jl:563 svd example 2: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:301 Got exception outside of a @test Scalar indexing is 
disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] __matmul2x2_elements(tA::Char, A::JLArrays.JLArray{Float64, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1205 [10] __matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1234 [inlined] [11] _matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1199 [inlined] [12] matmul2x2or3x3_nonzeroalpha!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:490 [13] gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [inlined] [14] _syrk_herk_gemm_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool, ::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:519 [15] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool, val::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511 [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [18] mul! 
@ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:270 [inlined] [19] * @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:141 [inlined] [20] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:39 [21] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [22] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [24] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [25] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:302 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:304 [inlined] [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [49] _start() @ Base ./client.jl:563 svd example 3: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:309 Got exception outside of a @test Scalar indexing is 
disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] __matmul2x2_elements(tA::Char, A::JLArrays.JLArray{Float64, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1205 [10] __matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1234 [inlined] [11] _matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1199 [inlined] [12] matmul2x2or3x3_nonzeroalpha!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:490 [13] gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [inlined] [14] _syrk_herk_gemm_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool, ::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:519 [15] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool, val::Val{LinearAlgebra.BlasFlag.SYRK}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511 [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [18] mul! 
@ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:270 [inlined] [19] * @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:141 [inlined] [20] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:39 [21] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [22] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [24] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [25] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:310 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:312 [inlined] [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ none:6 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [49] _start() @ Base ./client.jl:563 svd example 4: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:317 Got exception outside of a @test Scalar indexing is 
disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] chkuplofinite(A::JLArrays.JLArray{Float64, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:117 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float64, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:5393 [11] #eigen!#253 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:24 [inlined] [12] eigen! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:18 [inlined] [13] #_eigen#255 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:61 [inlined] [14] _eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:59 [inlined] [15] #eigen#254 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:55 [inlined] [16] eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:54 [inlined] [17] eigen @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined] [18] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:40 [19] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29 [20] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108 [21] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined] [22] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58 [23] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) 
@ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39 [24] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:318 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:320 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [35] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [36] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [41] eval(m::Module, e::Any) @ Core ./boot.jl:489 [42] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [43] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [44] top-level scope @ none:6 [45] eval(m::Module, e::Any) @ Core ./boot.jl:489 [46] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [47] _start() @ Base ./client.jl:563 svd example 5: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:325 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
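Examples 4 and 5 cannot usefully be rescued by `@allowscalar` alone: the scalar reads in `chkuplofinite` (frame [9]) are only the first obstacle, since on a real GPU backend the LAPACK routine behind `eigen` needs host memory in any case. The natural fallback is the same host round-trip, sketched here for the symmetric eigensolve that `svd_recursive` performs (illustrative, not NDTensors code):

    using JLArrays, LinearAlgebra

    A = jl(rand(Float64, 3, 3))
    H = A + A'                               # symmetric matrix on the device side
    vals, vecs = eigen(Symmetric(Array(H)))  # solve on the host with LAPACK
    vals_d, vecs_d = jl(vals), jl(vecs)      # move the results back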
svd example 5: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:325
Got exception outside of a @test
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] chkuplofinite(A::JLArrays.JLArray{Float64, 2}, uplo::Char) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:117
 [10] syevr!(jobz::Char, range::Char, uplo::Char, A::JLArrays.JLArray{Float64, 2}, vl::Float64, vu::Float64, il::Int64, iu::Int64, abstol::Float64) @ LinearAlgebra.LAPACK /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/lapack.jl:5393
 [11] #eigen!#253 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:24 [inlined]
 [12] eigen! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:18 [inlined]
 [13] #_eigen#255 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:61 [inlined]
 [14] _eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:59 [inlined]
 [15] #eigen#254 @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:55 [inlined]
 [16] eigen @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/symmetriceigen.jl:54 [inlined]
 [17] eigen @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:24 [inlined]
 [18] svd_recursive(M::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:40
 [19] svd_recursive(M::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29
 [20] svd_recursive(M::JLArrays.JLArray{Float64, 2}; thresh::Float64, north_pass::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:32
 [21] svd_recursive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/svd.jl:29
 [22] svd(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; mindim::Nothing, maxdim::Nothing, cutoff::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing, alg::Nothing, min_blockdim::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:108
 [23] svd @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:87 [inlined]
 [24] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}; min_blockdim::Nothing, mindim::Nothing, maxdim::Nothing, cutoff::Nothing, alg::Nothing, use_absolute_cutoff::Nothing, use_relative_cutoff::Nothing) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:58
 [25] svd(T::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/linearalgebra.jl:39
 [26] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:30
 [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:287 [inlined]
 [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:326 [inlined]
 [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [32] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:328 [inlined]
 [33] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [34] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl:1
 [35] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [36] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [37] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [38] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [43] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [46] top-level scope @ none:6
 [47] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [48] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
 [49] _start() @ Base ./client.jl:563
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75
Got exception outside of a @test
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float64, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 2}, Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 3}, JLArrays.JLArray{Float64, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, is::Tuple{Vector{Int64}, Vector{Int64}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/blocksparsetensor.jl:600
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:94 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
 [38] _start() @ Base ./client.jl:563
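[Editor's note] The combiner failures (this one and the three that follow) all funnel through Base's generic permutedims! fallback: the destination is a ReshapedArray wrapping a SubArray of the device array, a doubly wrapped combination no GPU method covers. A sketch of that wrapper pattern, assuming JLArrays and the same dispatch as in the trace above:

    using JLArrays

    src  = JLArray(rand(2, 3, 4))
    buf  = JLArray(zeros(12, 2))
    dest = reshape(view(buf, 1:12, 1:2), 3, 4, 2)  # ReshapedArray of a SubArray of a JLArray

    # Base.PermutedDimsArrays._copy! iterates element by element over this
    # wrapped destination, tripping the scalar-indexing guard.
    permutedims!(dest, src, (2, 3, 1))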
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75
Got exception outside of a @test
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float64, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float64, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float64, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 2}, Base.ReshapedArray{Float64, 3, SubArray{Float64, 2, JLArrays.JLArray{Float64, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 3}, JLArrays.JLArray{Float64, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, is::Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/blocksparsetensor.jl:600
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float64, 3, Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:94 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
 [38] _start() @ Base ./client.jl:563
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75
Got exception outside of a @test
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float32, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 2}, Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 3}, JLArrays.JLArray{Float32, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, is::Tuple{Vector{Int64}, Vector{Int64}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/blocksparsetensor.jl:600
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Vector{Int64}, Vector{Int64}, Vector{Int64}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:94 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
 [38] _start() @ Base ./client.jl:563
BlockSparse * Combiner: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75
Got exception outside of a @test
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] _permutedims!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}, R1::CartesianIndices{0, Tuple{}}, R2::CartesianIndices{0, Tuple{}}, R3::CartesianIndices{1, Tuple{Base.OneTo{Int64}}}, ds::Int64, dp::Int64) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:322
 [10] _copy!(P::PermutedDimsArray{Float32, 3, (3, 1, 2), (2, 3, 1), Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, src::JLArrays.JLArray{Float32, 3}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:311
 [11] permutedims!(dest::Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, src::JLArrays.JLArray{Float32, 3}, perm::Tuple{Int64, Int64, Int64}) @ Base.PermutedDimsArrays ./permuteddimsarray.jl:287
 [12] permutedims!(Edest::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 2}, Base.ReshapedArray{Float32, 3, SubArray{Float32, 2, JLArrays.JLArray{Float32, 2}, Tuple{UnitRange{Int64}, UnitRange{Int64}}, false}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, Esrc::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 3}, JLArrays.JLArray{Float32, 3}}, perm::Tuple{Int64, Int64, Int64}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/permutedims.jl:6
 [13] permutedims_combine(T::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, is::Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}, perm::Tuple{Int64, Int64, Int64}, combdims::Tuple{Int64, Int64}, blockperm::Vector{Int64}, blockcomb::Vector{Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/blocksparsetensor.jl:600
 [14] contract(tensor::NDTensors.BlockSparseTensor{Float32, 3, Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}, NDTensors.BlockSparse{Float32, JLArrays.JLArray{Float32, 1}, 3}}, tensor_labels::Tuple{Int64, Int64, Int64}, combiner_tensor::NDTensors.Tensor{Number, 3, NDTensors.Combiner, Tuple{Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}, Main.var"##NDTensors#284".var"##414".Index{Vector{Pair{Main.var"##NDTensors#284".var"##414".QN, Int64}}}}}, combiner_tensor_labels::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/combiner.jl:72
 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:40 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:75 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:94 [inlined]
 [22] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [23] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl:1
 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [25] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [32] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [33] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [34] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [35] top-level scope @ none:6
 [36] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [37] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
 [38] _start() @ Base ./client.jl:563
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_dense.jl
DenseTensor basic functionality: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:148
Test threw exception
Expression: Array(J * K) ≈ Array(J) * Array(K)
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] __matmul2x2_elements(tA::Char, A::JLArrays.JLArray{Float64, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1205
 [10] __matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1234 [inlined]
 [11] _matmul2x2_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1199 [inlined]
 [12] matmul2x2or3x3_nonzeroalpha!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:490
 [13] gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [inlined]
 [14] _syrk_herk_gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:531 [inlined]
 [15] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Bool, β::Bool, val::Val{LinearAlgebra.BlasFlag.GEMM}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511
 [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined]
 [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined]
 [18] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:270 [inlined]
 [19] * @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:141 [inlined]
 [20] *(T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, T2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:16
 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:15
 [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:15 [inlined]
 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [25] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:19 [inlined]
 [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:148 [inlined]
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined]
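[Editor's note] The interesting frames here are [9]-[15]: on this Julia build, LinearAlgebra's generic matmul wrapper special-cases 2x2 (and 3x3) matrices and reads their entries one at a time instead of handing the whole product to a backend kernel. Assuming the same dispatch shown in the trace, the failure reduces to this sketch:

    using JLArrays, LinearAlgebra

    A = JLArray(rand(2, 2))
    B = JLArray(rand(2, 2))
    C = JLArray(zeros(2, 2))
    # gemm_wrapper! routes 2x2 inputs to matmul2x2or3x3_nonzeroalpha!,
    # whose per-element getindex is disallowed on a GPU array.
    mul!(C, A, B)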
No permutation: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:234
Got exception outside of a @test
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF64, 1}, JLArrays.JLArray{ComplexF64, 1}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:6
 [10] getindex @ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:96 [inlined]
 [11] _contract_scalar_maybe_perm!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T₁::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT₁::Tuple{Int64, Int64, Int64}, T₂::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT₂::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:134
 [12] _contract_scalar! @ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:155 [inlined]
 [13] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:172
 [14] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:171
 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:15
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:15 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:234 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:235 [inlined]
 [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:240 [inlined]
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:1
 [26] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [27] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [32] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [33] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [35] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [36] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [37] top-level scope @ none:6
 [38] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [39] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
 [40] _start() @ Base ./client.jl:563
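[Editor's note] Frames [9]-[11] show the trigger for this pair of contraction failures: the scalar branch of contract! reads a zero-dimensional tensor back as a host scalar through Expose's getindex, i.e. x[] on a device array. In isolation (again assuming JLArrays):

    using JLArrays, GPUArraysCore

    x = JLArray(fill(2.0))                 # zero-dimensional device array
    # x[]                                  # same scalar getindex guard as above
    val = GPUArraysCore.@allowscalar x[]   # explicit host-side read of the one element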
Permutation: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:245
Got exception outside of a @test
Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
 [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
 [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
 [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
 [9] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF64, 1}, JLArrays.JLArray{ComplexF64, 1}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:6
 [10] getindex @ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:96 [inlined]
 [11] _contract_scalar_maybe_perm!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T₁::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT₁::Tuple{Int64, Int64, Int64}, T₂::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT₂::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:134
 [12] _contract_scalar! @ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:155 [inlined]
 [13] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:172
 [14] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:171
 [15] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:15
 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:15 [inlined]
 [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:234 [inlined]
 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:246 [inlined]
 [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:251 [inlined]
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:1
 [26] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [27] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [30] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [31] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [32] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [33] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [35] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [36] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
 [37] top-level scope @ none:6
 [38] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [39] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
 [40] _start() @ Base ./client.jl:563
Precompiling packages...
✗ Static
✗ BitTwiddlingConvenienceFunctions
✗ StaticArrayInterface
✗ CPUSummary
✗ HostCPUFeatures
✗ StaticArrayInterface → StaticArrayInterfaceOffsetArraysExt
✗ CloseOpenIntervals
✗ LayoutPointers
✗ PolyesterWeave
✗ VectorizationBase
✗ SLEEFPirates
✗ LoopVectorization
Info Given Octavian was explicitly requested, output will be shown live
ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
Suggestion: check for spelling errors or missing imports.
Stacktrace:
 [1] getproperty @ ./Base_compiler.jl:50 [inlined]
 [2] recompile_invalidations(__module__::Module, expr::Any) @ PrecompileTools ~/.julia/packages/PrecompileTools/Z8SWe/src/invalidations.jl:17
 [3] top-level scope @ ~/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:19
 [4] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [5] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
 [6] top-level scope @ stdin:5
 [7] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [8] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [9] include_string @ ./loading.jl:2873 [inlined]
 [10] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [11] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_lk0Qnz" (ProcessExited(1)).
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
 [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
 [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
 [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
 [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
 [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
 [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
 [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] _eval_import(imported::Bool, to::Module, from::Nothing, paths::Expr) @ Base ./module.jl:111
 [20] top-level scope @ ~/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:6
 [21] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [22] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::String) @ Base ./loading.jl:3017
 [23] top-level scope @ stdin:5
 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [25] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [26] include_string @ ./loading.jl:2873 [inlined]
 [27] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [28] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/VectorizationBase/wHnQd/src/VectorizationBase.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile VectorizationBase [3d5dd08c-fd9d-11e8-17fa-ed2836048c2f] to "/home/pkgeval/.julia/compiled/v1.13/VectorizationBase/jl_VYNq6j" (ProcessExited(1)).
Stacktrace:
 [1] error(s::String) @ Base ./error.jl:44
 [2] compilecache(pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool; flags::Cmd, cacheflags::Base.CacheFlags, reasons::Dict{String, Int64}, loadable_exts::Nothing) @ Base ./loading.jl:3304
 [3] kwcall(::@NamedTuple{reasons::Dict{String, Int64}, loadable_exts::Nothing}, ::typeof(Base.compilecache), pkg::Base.PkgId, path::String, internal_stderr::IO, internal_stdout::IO, keep_loaded_modules::Bool) @ Base ./loading.jl:3182
 [4] (::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId})() @ Base ./loading.jl:2669
 [5] mkpidlock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, at::String, pid::Int32; kwopts::@Kwargs{stale_age::Int64, wait::Bool}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:94
 [6] #mkpidlock#7 @ /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:89 [inlined]
 [7] trymkpidlock(::Function, ::Vararg{Any}; kwargs::@Kwargs{stale_age::Int64}) @ FileWatching.Pidfile /opt/julia/share/julia/stdlib/v1.13/FileWatching/src/pidfile.jl:115
 [8] #invokelatest_gr#233 @ ./reflection.jl:1333 [inlined]
 [9] invokelatest_gr @ ./reflection.jl:1325 [inlined]
 [10] maybe_cachefile_lock(f::Base.var"#__require_prelocked##0#__require_prelocked##1"{Base.PkgId}, pkg::Base.PkgId, srcpath::String; stale_age::Int64) @ Base ./loading.jl:3875
 [11] maybe_cachefile_lock @ ./loading.jl:3872 [inlined]
 [12] __require_prelocked(pkg::Base.PkgId, env::String) @ Base ./loading.jl:2655
 [13] _require_prelocked(uuidkey::Base.PkgId, env::String) @ Base ./loading.jl:2484
 [14] macro expansion @ ./loading.jl:2412 [inlined]
 [15] macro expansion @ ./lock.jl:376 [inlined]
 [16] __require(into::Module, mod::Symbol) @ Base ./loading.jl:2376
 [17] require @ ./loading.jl:2352 [inlined]
 [18] eval_import_path @ ./module.jl:36 [inlined]
 [19] eval_import_path_all(at::Module, path::Expr, keyword::String) @ Base ./module.jl:60
 [20] _eval_using(to::Module, path::Expr) @ Base ./module.jl:137
 [21] top-level scope @ ~/.julia/packages/Octavian/4f4xi/src/Octavian.jl:3
 [22] include(mod::Module, _path::String) @ Base ./Base.jl:308
 [23] include_package_for_output(pkg::Base.PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, concrete_deps::Vector{Pair{Base.PkgId, UInt128}}, source::Nothing) @ Base ./loading.jl:3017
 [24] top-level scope @ stdin:5
 [25] eval(m::Module, e::Any) @ Core ./boot.jl:489
 [26] include_string(mapexpr::typeof(identity), mod::Module, code::String, filename::String) @ Base ./loading.jl:2863
 [27] include_string @ ./loading.jl:2873 [inlined]
 [28] exec_options(opts::Base.JLOptions) @ Base ./client.jl:328
 [29] _start() @ Base ./client.jl:563
in expression starting at /home/pkgeval/.julia/packages/Octavian/4f4xi/src/Octavian.jl:1
in expression starting at stdin:5
✗ Octavian
0 dependencies successfully precompiled in 92 seconds. 26 already precompiled.
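[Editor's note] These precompile failures are unrelated to the GPU errors above: PrecompileTools' recompile_invalidations references Base.StaticData, which this 1.13-DEV build does not define, so StaticArrayInterface and everything downstream of it (VectorizationBase, Octavian, ...) fails in a chain. A hypothetical probe illustrating the mismatch; the branches below are ours, not part of any package:

    # Sketch only: check for the internal Base module PrecompileTools expects.
    if isdefined(Base, :StaticData)
        @info "Base.StaticData present; @recompile_invalidations can hook it"
    else
        @warn "Base.StaticData missing on this Julia build; packages using " *
              "PrecompileTools.@recompile_invalidations will fail to precompile"
    end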
change backends: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_dense.jl:303
Got exception outside of a @test
The following 1 direct dependency failed to precompile:
Octavian
Failed to precompile Octavian [6fd5a793-0b7e-452c-907f-f8bfe9c57db4] to "/home/pkgeval/.julia/compiled/v1.13/Octavian/jl_KRriTv" (ProcessExited(1)).
ERROR: LoadError: UndefVarError: `StaticData` not defined in `Base`
Suggestion: check for spelling errors or missing imports.
in expression starting at /home/pkgeval/.julia/packages/StaticArrayInterface/lkDPR/src/StaticArrayInterface.jl:1
in expression starting at stdin:5
ERROR: LoadError: Failed to precompile StaticArrayInterface [0d7ed370-da01-4f52-bd93-41d350b8b718] to "/home/pkgeval/.julia/compiled/v1.13/StaticArrayInterface/jl_lk0Qnz" (ProcessExited(1)).
ERROR: LoadError: Failed to precompile VectorizationBase [3d5dd08c-fd9d-11e8-17fa-ed2836048c2f] to "/home/pkgeval/.julia/compiled/v1.13/VectorizationBase/jl_VYNq6j" (ProcessExited(1)).
in expression starting at /home/pkgeval/.julia/packages/Octavian/4f4xi/src/Octavian.jl:1
in expression starting at stdin:
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:58 [inlined] [6] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/tensorstorage/tensorstorage.jl:30 [inlined] [7] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/diagtensor.jl:103 [inlined] [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 1}, NDTensors.DiagTensor{Float32, 0, Tuple{}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}}, x::Float32) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:9 [9] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:96 [inlined] [10] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:73 [inlined] [11] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:71 [inlined] [12] _contract!!(output_tensor::NDTensors.DiagTensor{Float32, 0, Tuple{}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [13] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [14] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [15] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [16] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}}}, tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [18] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:67 [inlined] [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined] test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
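As the message says, the opt-in is `@allowscalar` for individual operations or `allowscalar` for a global switch. A minimal sketch of the per-operation form, using JLArrays (the CPU-backed array type appearing in the traces above) and illustrative values:

    using GPUArraysCore: @allowscalar
    using JLArrays

    a = JLArray(ones(Float32, 4))
    x = @allowscalar a[1]      # scalar getindex, permitted inside the macro
    @allowscalar a[2] = 2.0f0  # scalar setindex!, likewise permitted

In test suites the macro form is usually preferable, since it keeps accidental scalar fallbacks elsewhere loud.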
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _unsafe_getindex_rs @ ./reshapedarray.jl:317 [inlined] [11] _unsafe_getindex @ ./reshapedarray.jl:314 [inlined] [12] getindex @ ./reshapedarray.jl:302 [inlined] [13] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/generic.jl:100 [inlined] [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{Float32, 2}, A::JLArrays.JLArray{Float32, 2}, B::LinearAlgebra.Transpose{Float32, Base.ReshapedArray{Float32, 2, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float32, beta::Float32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1137 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1129 [inlined] [17] _generic_matmatmul!(C::JLArrays.JLArray{Float32, 2}, A::JLArrays.JLArray{Float32, 2}, B::LinearAlgebra.Transpose{Float32, Base.ReshapedArray{Float32, 2, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float32, beta::Float32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1123 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1113 [inlined] [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:348 [inlined] [20] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [21] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [22] mul! @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/mul.jl:2 [inlined] [23] mul!!(CM::JLArrays.JLArray{Float32, 2}, AM::JLArrays.JLArray{Float32, 2}, BM::LinearAlgebra.Transpose{Float32, Base.ReshapedArray{Float32, 2, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::Float32, β::Float32) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:2 [24] _contract!(CT::JLArrays.JLArray{Float32, 3}, AT::JLArrays.JLArray{Float32, 2}, BT::Base.ReshapedArray{Float32, 3, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/tensoralgebra/contract.jl:174 [25] _contract! 
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:230 [inlined] [26] contract!(R::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:213 [27] contract!(C::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}, Clabels::Tuple{Int64, Int64, Int64}, A::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, Blabels::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:141 [28] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] ┌[29] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [31] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [32] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [33] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [34] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [35] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [37] contract(tensor1::NDTensors.DiagTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float32, JLArrays.JLArray{Float32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float32, Base.ReshapedArray{Float32, 1, LinearAlgebra.Adjoint{Float32, JLArrays.JLArray{Float32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [38] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:98 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:1 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [48] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [50] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [52] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [53] eval(m::Module, e::Any) @ Core ./boot.jl:489 
[54] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [59] _start() @ Base ./client.jl:563 test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:67 Test threw exception Expression: sqrt((contract(D, (-1, -2), conj(D), (-1, -2)))[]) ≈ norm(D) Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:58 [inlined] [6] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/tensorstorage/tensorstorage.jl:30 [inlined] [7] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/diagtensor.jl:103 [inlined] [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF32, 1}, NDTensors.DiagTensor{ComplexF32, 0, Tuple{}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}}, x::ComplexF32) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:9 [9] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:96 [inlined] [10] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:73 [inlined] [11] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:71 [inlined] [12] _contract!!(output_tensor::NDTensors.DiagTensor{ComplexF32, 0, Tuple{}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [13] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [14] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [15] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [16] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}}}, tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [18] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:67 [inlined] [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined] test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
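For reference, the assertion at test_diag.jl:67 checks a standard identity: contracting a diagonal tensor with its own conjugate over both indices gives the squared Frobenius norm, so its square root must equal `norm(D)`. A plain-CPU sketch of the same identity with illustrative values:

    using LinearAlgebra

    d = rand(ComplexF32, 10)       # diagonal entries
    D = Diagonal(d)
    sqrt(sum(abs2, d)) ≈ norm(D)   # true: ‖D‖_F = √(Σᵢ |dᵢᵢ|²)

The failures above are therefore not about the math; the contraction writes its zero-dimensional output with a scalar `setindex!` (frames [5]-[8]), which GPU-style arrays refuse.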
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _unsafe_getindex_rs @ ./reshapedarray.jl:317 [inlined] [11] _unsafe_getindex @ ./reshapedarray.jl:314 [inlined] [12] getindex @ ./reshapedarray.jl:302 [inlined] [13] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/generic.jl:100 [inlined] [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{ComplexF32, 2}, A::JLArrays.JLArray{ComplexF32, 2}, B::LinearAlgebra.Transpose{ComplexF32, Base.ReshapedArray{ComplexF32, 2, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF32, beta::ComplexF32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1137 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1129 [inlined] [17] _generic_matmatmul!(C::JLArrays.JLArray{ComplexF32, 2}, A::JLArrays.JLArray{ComplexF32, 2}, B::LinearAlgebra.Transpose{ComplexF32, Base.ReshapedArray{ComplexF32, 2, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF32, beta::ComplexF32) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1123 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1113 [inlined] [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:348 [inlined] [20] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [21] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [22] mul! @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/mul.jl:2 [inlined] [23] mul!!(CM::JLArrays.JLArray{ComplexF32, 2}, AM::JLArrays.JLArray{ComplexF32, 2}, BM::LinearAlgebra.Transpose{ComplexF32, Base.ReshapedArray{ComplexF32, 2, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::ComplexF32, β::ComplexF32) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:2 [24] _contract!(CT::JLArrays.JLArray{ComplexF32, 3}, AT::JLArrays.JLArray{ComplexF32, 2}, BT::Base.ReshapedArray{ComplexF32, 3, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/tensoralgebra/contract.jl:174 [25] _contract! 
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:230 [inlined] [26] contract!(R::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:213 [27] contract!(C::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, Clabels::Tuple{Int64, Int64, Int64}, A::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, Blabels::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:141 [28] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] ┌[29] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [31] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [32] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [33] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [34] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [35] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [37] contract(tensor1::NDTensors.DiagTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF32, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF32, Base.ReshapedArray{ComplexF32, 1, LinearAlgebra.Adjoint{ComplexF32, JLArrays.JLArray{ComplexF32, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [38] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:98 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:1 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [48] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [50] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [52] macro expansion @ 
~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [53] eval(m::Module, e::Any) @ Core ./boot.jl:489 [54] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [59] _start() @ Base ./client.jl:563 test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:67 Test threw exception Expression: sqrt((contract(D, (-1, -2), conj(D), (-1, -2)))[]) ≈ norm(D) Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:58 [inlined] [6] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/tensorstorage/tensorstorage.jl:30 [inlined] [7] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/diagtensor.jl:103 [inlined] [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, NDTensors.DiagTensor{Float64, 0, Tuple{}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}}, x::Float64) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:9 [9] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:96 [inlined] [10] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:73 [inlined] [11] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:71 [inlined] [12] _contract!!(output_tensor::NDTensors.DiagTensor{Float64, 0, Tuple{}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [13] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [14] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [15] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [16] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}}}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [18] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:67 [inlined] [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined] test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
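The same scalar can be produced without scalar indexing by reducing on the device. A hedged sketch with JLArrays and illustrative values (this is the device-friendly pattern, not NDTensors' internal code path):

    using JLArrays

    d = JLArray(rand(Float64, 10))        # diagonal data as a device vector
    s = mapreduce(abs2, +, d)             # Σᵢ |dᵢ|² via an on-device reduction
    sqrt(s) ≈ sqrt(sum(abs2, Array(d)))   # matches the host-side computation

`mapreduce` is part of the GPUArrays interface, so the reduction runs through the array backend rather than element-by-element `getindex`.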
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _unsafe_getindex_rs @ ./reshapedarray.jl:317 [inlined] [11] _unsafe_getindex @ ./reshapedarray.jl:314 [inlined] [12] getindex @ ./reshapedarray.jl:302 [inlined] [13] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/generic.jl:100 [inlined] [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{Float64, 2}, A::JLArrays.JLArray{Float64, 2}, B::LinearAlgebra.Transpose{Float64, Base.ReshapedArray{Float64, 2, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1137 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1129 [inlined] [17] _generic_matmatmul!(C::JLArrays.JLArray{Float64, 2}, A::JLArrays.JLArray{Float64, 2}, B::LinearAlgebra.Transpose{Float64, Base.ReshapedArray{Float64, 2, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1123 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1113 [inlined] [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:348 [inlined] [20] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [21] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [22] mul! @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/mul.jl:2 [inlined] [23] mul!!(CM::JLArrays.JLArray{Float64, 2}, AM::JLArrays.JLArray{Float64, 2}, BM::LinearAlgebra.Transpose{Float64, Base.ReshapedArray{Float64, 2, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:2 [24] _contract!(CT::JLArrays.JLArray{Float64, 3}, AT::JLArrays.JLArray{Float64, 2}, BT::Base.ReshapedArray{Float64, 3, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/tensoralgebra/contract.jl:174 [25] _contract! 
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:230 [inlined] [26] contract!(R::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:213 [27] contract!(C::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Clabels::Tuple{Int64, Int64, Int64}, A::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, Blabels::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:141 [28] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] ┌[29] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [31] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [32] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [33] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [34] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [35] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [37] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{Float64, Base.ReshapedArray{Float64, 1, LinearAlgebra.Adjoint{Float64, JLArrays.JLArray{Float64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [38] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:98 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:1 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [48] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [50] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [52] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [53] eval(m::Module, e::Any) @ Core ./boot.jl:489 
[54] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [59] _start() @ Base ./client.jl:563 test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:67 Test threw exception Expression: sqrt((contract(D, (-1, -2), conj(D), (-1, -2)))[]) ≈ norm(D) Scalar indexing is disallowed. Invocation of setindex! resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:58 [inlined] [6] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/tensorstorage/tensorstorage.jl:30 [inlined] [7] setindex! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/diagtensor.jl:103 [inlined] [8] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF64, 1}, NDTensors.DiagTensor{ComplexF64, 0, Tuple{}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}}, x::ComplexF64) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:9 [9] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:96 [inlined] [10] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:73 [inlined] [11] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:71 [inlined] [12] _contract!!(output_tensor::NDTensors.DiagTensor{ComplexF64, 0, Tuple{}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsoutput_tensor::Tuple{}, tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor2::Tuple{Int64, Int64}, α::Int64, β::Int64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [13] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [14] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [15] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [16] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [17] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}}}, tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [18] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [22] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [23] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:67 [inlined] [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined] test device: jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
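Worth noting when reproducing these errors: the `jl` test device corresponds to JLArrays, the CPU-backed reference implementation of the GPUArrays interface, so everything above fails identically on a machine with no GPU. A minimal sketch of such a reproduction, with illustrative values:

    using JLArrays

    A = JLArray(rand(Float32, 3, 3))   # host array wrapped as a JLArray
    A * A                              # array-level ops are fine...
    # A[1, 1]                          # ...but scalar indexing throws, as in the log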
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _unsafe_getindex_rs @ ./reshapedarray.jl:317 [inlined] [11] _unsafe_getindex @ ./reshapedarray.jl:314 [inlined] [12] getindex @ ./reshapedarray.jl:302 [inlined] [13] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/generic.jl:100 [inlined] [15] _generic_matmatmul_nonadjtrans!(C::JLArrays.JLArray{ComplexF64, 2}, A::JLArrays.JLArray{ComplexF64, 2}, B::LinearAlgebra.Transpose{ComplexF64, Base.ReshapedArray{ComplexF64, 2, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF64, beta::ComplexF64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1137 [16] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1129 [inlined] [17] _generic_matmatmul!(C::JLArrays.JLArray{ComplexF64, 2}, A::JLArrays.JLArray{ComplexF64, 2}, B::LinearAlgebra.Transpose{ComplexF64, Base.ReshapedArray{ComplexF64, 2, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, alpha::ComplexF64, beta::ComplexF64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1123 [18] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1113 [inlined] [19] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:348 [inlined] [20] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [21] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [22] mul! @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/mul.jl:2 [inlined] [23] mul!!(CM::JLArrays.JLArray{ComplexF64, 2}, AM::JLArrays.JLArray{ComplexF64, 2}, BM::LinearAlgebra.Transpose{ComplexF64, Base.ReshapedArray{ComplexF64, 2, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}, α::ComplexF64, β::ComplexF64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:2 [24] _contract!(CT::JLArrays.JLArray{ComplexF64, 3}, AT::JLArrays.JLArray{ComplexF64, 2}, BT::Base.ReshapedArray{ComplexF64, 3, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}, props::NDTensors.ContractionProperties{2, 3, 3}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/tensoralgebra/contract.jl:174 [25] _contract! 
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:230 [inlined] [26] contract!(R::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsR::Tuple{Int64, Int64, Int64}, T1::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelsT2::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:213 [27] contract!(C::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, Clabels::Tuple{Int64, Int64, Int64}, A::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, Blabels::Tuple{Int64, Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:141 [28] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] ┌[29] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [31] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [32] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [33] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [34] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined] [35] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labelstensor2::Tuple{Int64, Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113 [36] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}}}, tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91 [37] contract(tensor1::NDTensors.DiagTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Diag{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{ComplexF64, 3, Tuple{Int64, Int64, Int64}, NDTensors.Dense{ComplexF64, Base.ReshapedArray{ComplexF64, 1, LinearAlgebra.Adjoint{ComplexF64, JLArrays.JLArray{ComplexF64, 2}}, Tuple{Base.MultiplicativeInverses.SignedMultiplicativeInverse{Int64}}}}}, labels_tensor2::Tuple{Int64, Int64, Int64}) @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [38] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [39] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [40] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:25 [inlined] [41] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [42] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:98 [inlined] [43] eval(m::Module, e::Any) @ Core ./boot.jl:489 [44] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:1 [45] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [46] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [47] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [48] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [49] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [50] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [51] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [52] macro expansion @ 
~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [53] eval(m::Module, e::Any) @ Core ./boot.jl:489 [54] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [55] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [56] top-level scope @ none:6 [57] eval(m::Module, e::Any) @ Core ./boot.jl:489 [58] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [59] _start() @ Base ./client.jl:563 DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:115 Test threw exception Expression: contract(A, (1, -2), t, (-2, 3)) == A Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] __matmul3x3_elements(tA::Char, A::JLArrays.JLArray{Float64, 2}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1267 [10] __matmul3x3_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1304 [inlined] [11] _matmul3x3_elements @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1261 [inlined] [12] matmul3x3!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Float64, β::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1327 [13] matmul2x2or3x3_nonzeroalpha! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:483 [inlined] [14] gemm_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Float64, β::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:894 [15] _syrk_herk_gemm_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:531 [inlined] [16] generic_matmatmul_wrapper!(C::JLArrays.JLArray{Float64, 2}, tA::Char, tB::Char, A::JLArrays.JLArray{Float64, 2}, B::JLArrays.JLArray{Float64, 2}, α::Float64, β::Float64, val::Val{LinearAlgebra.BlasFlag.GEMM}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:511 [17] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [18] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [19] mul! @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/mul.jl:2 [inlined] [20] mul!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:2 [inlined] [21] mul!! @ ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:10 [inlined] [22] _contract!(CT::JLArrays.JLArray{Float64, 2}, AT::JLArrays.JLArray{Float64, 2}, BT::JLArrays.JLArray{Float64, 2}, props::NDTensors.ContractionProperties{2, 2, 2}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/tensoralgebra/contract.jl:174 [23] _contract! @ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:230 [inlined] [24] contract!(R::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsR::Tuple{Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:213 [25] contract!(C::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Clabels::Tuple{Int64, Int64}, A::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Blabels::Tuple{Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:141 [26] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] [27] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:225 [inlined] ┌[28] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [30] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [31] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [32] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [33] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined]
 [34] contract(tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113
 [35] contract(::Type{NDTensors.CanContract{NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}}}, tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91
 [36] contract(tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332
 [37] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined]
 [38] macro expansion
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:115 [inlined]
 [39] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [40] top-level scope
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:2041

DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:116
  Test threw exception
  Expression: contract(A, (-2, 1), t, (-2, 3)) == transpose(A)
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
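A pattern worth noting in the traces above: in one case the right-hand operand is a `Transpose` of a `ReshapedArray` of an `Adjoint` of a `JLArray` (frame [15] of the first trace), and in another the operands are plain 3×3 `JLArray`s routed through `matmul3x3!`. In both, dispatch ends up in LinearAlgebra's element-wise loops instead of a device method, and the first scalar `getindex` aborts. A sketch of how such a wrapper stack arises, assuming only `JLArrays`:

    using LinearAlgebra, JLArrays
    B = JLArray(rand(ComplexF64, 4, 4))
    Bw = transpose(reshape(adjoint(B), 2, 8))
    typeof(Bw)  # Transpose{ComplexF64, Base.ReshapedArray{..., Adjoint{..., JLArray{...}}}} — no longer a plain JLArray
    # mul! with an operand like Bw (or through the 2x2/3x3 small-matrix fast path) can
    # select LinearAlgebra's generic loop, which reads elements one by one and throws here.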
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] _generic_matmatmul_generic!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, B::JLArrays.JLArray{Float64, 2}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1168 [11] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1128 [inlined] [12] _generic_matmatmul!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, B::JLArrays.JLArray{Float64, 2}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1123 [13] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1113 [inlined] [14] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:348 [inlined] [15] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [16] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [17] mul! @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/mul.jl:2 [inlined] [18] mul!!(CM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, AM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, BM::JLArrays.JLArray{Float64, 2}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:2 [19] _contract!(CT::JLArrays.JLArray{Float64, 2}, AT::JLArrays.JLArray{Float64, 2}, BT::JLArrays.JLArray{Float64, 2}, props::NDTensors.ContractionProperties{2, 2, 2}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/tensoralgebra/contract.jl:174 [20] _contract! 
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:230 [inlined] [21] contract!(R::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsR::Tuple{Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::Bool, β::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:213 [22] contract!(C::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Clabels::Tuple{Int64, Int64}, A::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Blabels::Tuple{Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:141 [23] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] [24] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:225 [inlined] ┌[25] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [27] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [28] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [29] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [30] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined]
 [31] contract(tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113
 [32] contract(::Type{NDTensors.CanContract{NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}}}, tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91
 [33] contract(tensor1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332
 [34] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined]
 [35] macro expansion
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:116 [inlined]
 [36] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [37] top-level scope
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:2041

DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:120
  Test threw exception
  Expression: ≈((contract(t, (-1, -2), A, (-1, -2)))[], dot(dev(array(t)), array(A)), rtol = sqrt(eps(elt)))
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
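Two scalar reads are in play in this test: the tensor `t` has uniform diagonal storage (`Diag{Float64, Float64}` in the trace below, a single scalar standing in for the whole diagonal), and the diag–dense contraction walks the device-resident dense tensor with `getdiagindex`, one element per iteration; the trailing `[]` on the zero-dimensional result would then be a second scalar `getindex`. Reading a zero-dimensional device result safely looks roughly like this, with a `JLArray` stand-in for the contraction output:

    using JLArrays, GPUArraysCore
    r = JLArray(fill(2.5))              # 0-dim device array, like the contraction result
    only(Array(r))                      # copy to host, then read — no scalar GPU indexing
    GPUArraysCore.@allowscalar r[]      # or an explicit, scoped opt-in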
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, JLArrays.JLArray{Float64, 1}}, I::Int64) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:13 [7] getindex @ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:101 [inlined] [8] getindex @ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:106 [inlined] [9] getdiagindex @ ~/.julia/packages/NDTensors/Lb78J/src/tensor/tensor.jl:412 [inlined] [10] contract!(C::NDTensors.DenseTensor{Float64, 0, Tuple{}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Clabels::Tuple{}, A::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Blabels::Tuple{Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:125 [11] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] ┌[12] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [14] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [15] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [16] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [17] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined]
 [18] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113
 [19] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}}}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91
 [20] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332
 [21] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined]
 [22] macro expansion
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:120 [inlined]
 [23] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [24] top-level scope
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:2041

DiagTensor contractions: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:125
  Test threw exception
  Expression: ≈(dot(t, A), dot(dev(array(t)), array(A)), rtol = sqrt(eps(elt)))
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
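`dot` on NDTensors tensors lowers to the same diag–dense contraction (frame [21] in the trace below, at linearalgebra.jl:28), so it fails in the identical `getdiagindex` loop. For a uniform diagonal the tested quantity is just a scaled diagonal sum, which can be phrased as one device reduction; a hedged sketch, assuming a uniform diagonal value `λ` and that the device stack supports reductions over views (GPUArrays generally does):

    using JLArrays, LinearAlgebra
    A = JLArray(rand(3, 3))
    λ = 2.0
    λ * sum(view(A, diagind(A)))   # == dot(Diagonal(fill(λ, 3)), A), without per-element reads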
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] getindex(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 1}, JLArrays.JLArray{Float64, 1}}, I::Int64) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:13 [7] getindex @ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:101 [inlined] [8] getindex @ ~/.julia/packages/NDTensors/Lb78J/src/dense/densetensor.jl:106 [inlined] [9] getdiagindex @ ~/.julia/packages/NDTensors/Lb78J/src/tensor/tensor.jl:412 [inlined] [10] contract!(C::NDTensors.DenseTensor{Float64, 0, Tuple{}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Clabels::Tuple{}, A::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Blabels::Tuple{Int64, Int64}, α::Bool, β::Bool; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:125 [11] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] ┌[12] contract! │ @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] ╰──── repeated 2 times [14] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:143 [inlined] [15] _contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:131 [inlined] [16] contract!! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:219 [inlined] [17] contract!! 
@ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:188 [inlined]
 [18] contract(tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, labelstensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelstensor2::Tuple{Int64, Int64}, labelsoutput_tensor::Tuple{})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:113
 [19] contract(::Type{NDTensors.CanContract{NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}}}, tensor1::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, labels_tensor1::Tuple{Int64, Int64}, tensor2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labels_tensor2::Tuple{Int64, Int64})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:91
 [20] contract
   @ ~/.julia/packages/SimpleTraits/7VJph/src/SimpleTraits.jl:332 [inlined]
 [21] dot(x::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, y::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}})
   @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:28
 [22] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:742 [inlined]
 [23] macro expansion
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:125 [inlined]
 [24] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [25] top-level scope
   @ ~/.julia/packages/NDTensors/Lb78J/test/test_diag.jl:2041

Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diagblocksparse.jl

DiagBlockSparse contract: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diagblocksparse.jl:60
  Got exception outside of a @test
  Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
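This DiagBlockSparse failure adds a device mismatch on top of the dispatch problem: in the trace below, the uniform-diagonal block is densified into a host array (frame [22] shows `T1` backed by `Dense{Float64, Vector{Float64}}`), and frames [11] and [19]–[20] then multiply a host `Matrix{Float64}` against `JLArray`-backed operands, for which only the generic scalar loop matches. A minimal sketch of the mismatch, assuming plain `JLArrays`:

    using LinearAlgebra, JLArrays
    A = rand(4, 4)                  # host Matrix, like the densified uniform-diagonal block
    B = JLArray(rand(4, 4))
    C = JLArray(zeros(4, 4))
    # mul!(C, A, B)   # no device method covers this host/device mix, so dispatch lands in
    #                 # LinearAlgebra's generic loop and the first element read throws

Keeping all three operands on the same side, host or device, sidesteps the mixed fallback.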
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] getindex @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/adjtrans.jl:348 [inlined] [10] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/generic.jl:100 [inlined] [11] _generic_matmatmul_nonadjtrans!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::Matrix{Float64}, B::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1137 [12] __generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1129 [inlined] [13] _generic_matmatmul!(C::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, A::Matrix{Float64}, B::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, alpha::Float64, beta::Float64) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1123 [14] generic_matmatmul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:1113 [inlined] [15] generic_matmatmul_wrapper! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:348 [inlined] [16] _mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:333 [inlined] [17] mul! @ /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/matmul.jl:302 [inlined] [18] mul! @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/mul.jl:2 [inlined] [19] mul!!(CM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, AM::Matrix{Float64}, BM::LinearAlgebra.Transpose{Float64, JLArrays.JLArray{Float64, 2}}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/mul.jl:2 [20] _contract!(CT::JLArrays.JLArray{Float64, 2}, AT::Matrix{Float64}, BT::JLArrays.JLArray{Float64, 2}, props::NDTensors.ContractionProperties{2, 2, 2}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/abstractarray/tensoralgebra/contract.jl:174 [21] _contract! 
@ ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:230 [inlined] [22] contract!(R::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsR::Tuple{Int64, Int64}, T1::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, Vector{Float64}}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, labelsT2::Tuple{Int64, Int64}, α::Float64, β::Float64) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/dense/tensoralgebra/contract.jl:213 [23] contract!(C::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Clabels::Tuple{Int64, Int64}, A::NDTensors.DiagTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Diag{Float64, Float64}}, Alabels::Tuple{Int64, Int64}, B::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}, Blabels::Tuple{Int64, Int64}, α::Float64, β::Float64; convert_to_dense::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:141 [24] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:103 [inlined] [25] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/diag/tensoralgebra/contract.jl:225 [inlined] [26] contract! @ ~/.julia/packages/NDTensors/Lb78J/src/tensoroperations/generic_tensor_operations.jl:165 [inlined] [27] contract!(R::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsR::Tuple{Int64, Int64}, T1::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DiagBlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.DiagBlockSparse{Float64, Float64, 2}}, labelsT2::Tuple{Int64, Int64}, contraction_plan::Vector{Tuple{NDTensors.Block{2}, NDTensors.Block{2}, NDTensors.Block{2}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/diagblocksparse.jl:671 [28] contract(T1::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DiagBlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.DiagBlockSparse{Float64, Float64, 2}}, labelsT2::Tuple{Int64, Int64}, labelsR::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/diagblocksparse.jl:621 [29] contract(T1::NDTensors.BlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.BlockSparse{Float64, JLArrays.JLArray{Float64, 1}, 2}}, labelsT1::Tuple{Int64, Int64}, T2::NDTensors.DiagBlockSparseTensor{Float64, 2, Tuple{Vector{Int64}, Vector{Int64}}, NDTensors.DiagBlockSparse{Float64, Float64, 2}}, labelsT2::Tuple{Int64, Int64}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/blocksparse/diagblocksparse.jl:620 [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_diagblocksparse.jl:67 [inlined] [31] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_diagblocksparse.jl:2041 [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] top-level scope @ 
~/.julia/packages/NDTensors/Lb78J/test/test_diagblocksparse.jl:1
 [35] include(mapexpr::Function, mod::Module, _path::String)
   @ Base ./Base.jl:309
 [36] top-level scope
   @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
 [37] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [38] macro expansion
   @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
 [39] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
 [40] macro expansion
   @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
 [41] macro expansion
   @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
 [42] macro expansion
   @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
 [43] eval(m::Module, e::Any)
   @ Core ./boot.jl:489
 [44] top-level scope
   @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
 [45] include(mapexpr::Function, mod::Module, _path::String)
   @ Base ./Base.jl:309
 [46] top-level scope
   @ none:6
 [47] eval(m::Module, e::Any)
   @ Core ./boot.jl:489
 [48] exec_options(opts::Base.JLOptions)
   @ Base ./client.jl:296
 [49] _start()
   @ Base ./client.jl:563

Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_emptynumber.jl
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_emptystorage.jl
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl

Dense qr decomposition, elt=Float64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}})
  The type `JLArrays.JLArray{Float64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it.
  Closest candidates are:
    JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N}
      @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:100
    JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N}
      @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:88
    JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N}
      @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:129
    ...
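All of the QR failures below come from one step: `convert(::Type{<:AbstractArray}, Q::AbstractQ)` in LinearAlgebra lowers to a constructor call, `JLArray{T, 2}(Q)` here (abstractq.jl:49 in the trace that follows), and only Base's `Array` provides that constructor for an `AbstractQ`. A CPU sketch of the supported materialization:

    using LinearAlgebra
    A = rand(6, 4)
    F = qr(A)
    Q = Matrix(F.Q)                  # thin 6×4 dense factor; the generic way to densify an AbstractQ
    # convert(Matrix{Float64}, F.Q)  # also works, because Matrix{T}(::AbstractQ) is defined
    # For a GPU matrix type such as JLArray{Float64, 2}, the same convert lowers to
    # JLArray{Float64, 2}(F.Q), and no such constructor exists — hence the MethodError above.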
Stacktrace: [1] convert(::Type{JLArrays.JLArray{Float64, 2}}, Q::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/abstractq.jl:49 [2] qx(qx::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:353 [3] qr(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:330 [4] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:328 [5] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [7] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [8] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [9] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [10] eval(m::Module, e::Any) @ Core ./boot.jl:489 [11] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [12] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [13] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ none:6 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489 [25] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [26] _start() @ Base ./client.jl:563 Dense qr decomposition, elt=Float64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) The type `JLArrays.JLArray{Float64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it. 
Closest candidates are: JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:100 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:88 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:129 ... Stacktrace: [1] convert(::Type{JLArrays.JLArray{Float64, 2}}, Q::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/abstractq.jl:49 [2] qx(qx::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:353 [3] qr(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:330 [4] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:328 [5] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [7] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [8] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [9] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [10] eval(m::Module, e::Any) @ Core ./boot.jl:489 [11] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [12] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [13] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ none:6 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489 [25] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [26] _start() @ Base ./client.jl:563 Dense qr decomposition, elt=Float64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) The type 
`JLArrays.JLArray{Float64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it. Closest candidates are: JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:100 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:88 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:129 ... Stacktrace: [1] convert(::Type{JLArrays.JLArray{Float64, 2}}, Q::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/abstractq.jl:49 [2] qr_positive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:383 [3] qr_positive(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:11 [4] qx(qx::typeof(NDTensors.Expose.qr_positive), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [5] qr(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:330 [6] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:328 [7] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [8] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [9] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [10] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [11] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [12] eval(m::Module, e::Any) @ Core ./boot.jl:489 [13] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [14] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [15] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [22] eval(m::Module, e::Any) @ Core ./boot.jl:489 [23] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [25] top-level scope @ none:6 [26] eval(m::Module, e::Any) @ Core 
./boot.jl:489 [27] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [28] _start() @ Base ./client.jl:563 Dense qr decomposition, elt=Float64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test MethodError: no method matching JLArrays.JLArray{Float64, 2}(::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) The type `JLArrays.JLArray{Float64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it. Closest candidates are: JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:100 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:88 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:129 ... Stacktrace: [1] convert(::Type{JLArrays.JLArray{Float64, 2}}, Q::LinearAlgebra.QRCompactWYQ{Float64, JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/abstractq.jl:49 [2] qr_positive(M::JLArrays.JLArray{Float64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:383 [3] qr_positive(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float64, 2}, JLArrays.JLArray{Float64, 2}}) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:11 [4] qx(qx::typeof(NDTensors.Expose.qr_positive), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [5] qr(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:330 [6] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:328 [7] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [8] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [9] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [10] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [11] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [12] eval(m::Module, e::Any) @ Core ./boot.jl:489 [13] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [14] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [15] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] macro expansion @ 
~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [20] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [21] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [22] eval(m::Module, e::Any) @ Core ./boot.jl:489 [23] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [24] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [25] top-level scope @ none:6 [26] eval(m::Module, e::Any) @ Core ./boot.jl:489 [27] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [28] _start() @ Base ./client.jl:563 Dense qr decomposition, elt=ComplexF64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test MethodError: no method matching JLArrays.JLArray{ComplexF64, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}}) The type `JLArrays.JLArray{ComplexF64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it. Closest candidates are: JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:100 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:88 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:129 ... Stacktrace: [1] convert(::Type{JLArrays.JLArray{ComplexF64, 2}}, Q::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}}) @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/abstractq.jl:49 [2] qx(qx::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:353 [3] qr(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:330 [4] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:328 [5] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [6] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [7] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [8] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [9] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [10] eval(m::Module, e::Any) @ Core ./boot.jl:489 [11] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [12] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [13] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [14] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [15] macro 
expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [19] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ none:6 [24] eval(m::Module, e::Any) @ Core ./boot.jl:489 [25] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [26] _start() @ Base ./client.jl:563 Dense qr decomposition, elt=ComplexF64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test MethodError: no method matching JLArrays.JLArray{ComplexF64, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}}) The type `JLArrays.JLArray{ComplexF64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it. Closest candidates are: JLArrays.JLArray{T, N}(!Matched::GPUArrays.DataRef{Vector{UInt8}}, !Matched::NTuple{N, Int64}; offset) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:100 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Int64}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:88 JLArrays.JLArray{T, N}(!Matched::UndefInitializer, !Matched::NTuple{N, Integer}) where {T, N} @ JLArrays ~/.julia/packages/JLArrays/vqfM6/src/JLArrays.jl:129 ... 
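Half of these parameter combinations additionally route through NDTensors' `qr_positive` (linearalgebra.jl:383 in the positve=true traces), which fixes the gauge so that R has a non-negative diagonal before hitting the same convert. The transformation itself is small; a self-contained CPU sketch of the standard sign-fixing identity (not NDTensors' implementation):

    using LinearAlgebra
    function positive_qr(M::AbstractMatrix)
        F = qr(M)
        Q, R = Matrix(F.Q), Matrix(F.R)
        d = [iszero(r) ? one(r) : sign(r) for r in diag(R)]   # unit-modulus phases of diag(R)
        D = Diagonal(d)
        return Q * D, D' * R    # Q*D*(D'*R) == Q*R, and diag(D'*R) is real and non-negative
    end

    Q, R = positive_qr(rand(ComplexF64, 6, 4))
    all(real.(diag(R)) .>= 0)   # true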
Dense qr decomposition, elt=ComplexF64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  MethodError: no method matching JLArrays.JLArray{ComplexF64, 2}(::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}})
  The type `JLArrays.JLArray{ComplexF64, 2}` exists, but no method is defined for this combination of argument types when trying to construct it.

  Closest candidates are:
    (the same three JLArrays.JLArray{T, N} constructor candidates as above)

  Stacktrace:
    [1] convert(::Type{JLArrays.JLArray{ComplexF64, 2}}, Q::LinearAlgebra.QRCompactWYQ{ComplexF64, JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}})
      @ LinearAlgebra /opt/julia/share/julia/stdlib/v1.13/LinearAlgebra/src/abstractq.jl:49
    [2] qr_positive(M::JLArrays.JLArray{ComplexF64, 2})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:383
    [3] qr_positive(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF64, 2}, JLArrays.JLArray{ComplexF64, 2}})
      @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:11
    [4] qx(qx::typeof(NDTensors.Expose.qr_positive), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344
    [5] qr(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool)
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:330
    [6] kwcall(::@NamedTuple{positive::Bool}, ::typeof(LinearAlgebra.qr), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:328
    [7]-[28]: the same Test/SafeTestsets/runtests harness frames as in the first trace above
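The positve=true cases reach the same convert through qr_positive. For reference, a positive QR is normally obtained from an ordinary QR by a diagonal phase fix-up; the sketch below (a hypothetical helper, presumably what qr_positive does, but not NDTensors' actual code) rescales Q and R by the phases of diag(R) so the returned R has a real, non-negative diagonal while Q*R is unchanged:

    using LinearAlgebra

    function positive_qr_sketch(A::AbstractMatrix)
        F = qr(A)
        Q, R = Matrix(F.Q), Matrix(F.R)
        # unit-modulus phases of diag(R); map zeros to one to stay unitary
        s = map(x -> iszero(x) ? one(x) : sign(x), diag(R))
        return Q * Diagonal(s), Diagonal(conj.(s)) * R
    end

    A = randn(ComplexF64, 4, 3)
    Q, R = positive_qr_sketch(A)
    @assert Q * R ≈ A
    @assert all(x -> real(x) >= 0 && abs(imag(x)) < 1e-12, diag(R))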
The remaining nine dense qr test cases fail in the same way:

Dense qr decomposition, elt=ComplexF64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=Float32, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=Float32, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=Float32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=Float32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=ComplexF32, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=ComplexF32, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=ComplexF32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
Dense qr decomposition, elt=ComplexF32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29

Each got an exception outside of a @test, reporting the same MethodError: no method matching JLArrays.JLArray{elt, 2}(::LinearAlgebra.QRCompactWYQ{elt, JLArrays.JLArray{elt, 2}, JLArrays.JLArray{elt, 2}}), with the stack trace of the positve=false or positve=true representative above according to the positve flag.
Dense ql decomposition, elt=Float64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed.
  Invocation of getindex resulted in scalar indexing of a GPU array.
  This is typically caused by calling an iterating implementation of a method.
  Such implementations *do not* execute on the GPU, but very slowly on the CPU,
  and therefore should be avoided.
  If you want to allow scalar iteration, use `allowscalar` or `@allowscalar`
  to enable scalar iteration globally or for the operations in question.
  Stacktrace:
    [1] error(s::String)
      @ Base ./error.jl:44
    [2] errorscalar(op::String)
      @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
    [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing)
      @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
    [4] assertscalar(op::String)
      @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
    [5] getindex
      @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined]
    [6] scalar_getindex
      @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined]
    [7] _getindex
      @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined]
    [8] getindex
      @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined]
    [9] ql!(A::JLArrays.JLArray{Float64, 2})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449
   [10] ql
      @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined]
   [11] ql
      @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined]
   [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344
   [13] ql(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool)
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336
   [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334
   [15]-[36]: the same Test/SafeTestsets/runtests harness frames as in the first trace above
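The ql failures are a different problem: ql! at linearalgebra.jl:449 reads matrix elements one at a time, and GPU-style arrays (including the JLArray reference implementation) typically forbid that outside interactive sessions. A small sketch of the guard and of the escape hatch the message itself names, GPUArraysCore.@allowscalar; the escape hatch is a debugging aid, since scalar loops are very slow on real GPU arrays:

    using JLArrays, GPUArraysCore

    A = JLArray(randn(3, 3))
    # With scalar indexing disallowed (the default in non-interactive runs
    # like this test job), a plain A[1, 1] throws the error shown above.
    x = GPUArraysCore.@allowscalar A[1, 1]   # explicitly permitted scalar read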
Dense ql decomposition, elt=Float64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  (the same scalar-indexing error and stack trace as the previous ql failure)
Dense ql decomposition, elt=Float64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  Scalar indexing is disallowed.
  (full scalar-indexing message as above)
  Stacktrace:
    [1]-[10]: the same GPUArraysCore/GPUArrays/ql! frames as in the ql trace above
   [11] ql_positive(M::JLArrays.JLArray{Float64, 2})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:403
   [12] ql_positive
      @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined]
   [13] qx
      @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [inlined]
   [14] ql(T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}}; positive::Bool)
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336
   [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float64, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float64, JLArrays.JLArray{Float64, 1}}})
      @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334
   [16]-[37]: the same Test/SafeTestsets/runtests harness frames as in the first trace above

Dense ql decomposition, elt=Float64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29
  Got exception outside of a @test
  (the same scalar-indexing error and stack trace as the previous failure)
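Since plain qr on a JLArray does succeed (the qr failures above occur only when converting the Q factor), one scalar-indexing-free way to compute a QL decomposition is via QR of the row- and column-reversed matrix: flipping an upper-triangular factor in both dimensions yields a lower-triangular one. A generic sketch under that assumption, not NDTensors' ql implementation:

    using LinearAlgebra

    # If flipboth(A) = Q̃ * R̃ (thin QR), then A = flipboth(Q̃) * flipboth(R̃),
    # and flipboth(R̃) is lower triangular.
    flipboth(M) = reverse(reverse(M; dims=1); dims=2)

    function ql_via_qr(A::AbstractMatrix)
        F = qr(flipboth(A))
        return flipboth(Matrix(F.Q)), flipboth(Matrix(F.R))
    end

    A = randn(5, 3)
    Q, L = ql_via_qr(A)
    @assert Q * L ≈ A       # same matrix, now as Q times lower-triangular L
    @assert istril(L)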
/opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ none:6 [35] eval(m::Module, e::Any) @ Core ./boot.jl:489 [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [37] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=ComplexF64, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] 
top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [30] eval(m::Module, e::Any) @ Core ./boot.jl:489 [31] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [33] top-level scope @ none:6 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489 [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [36] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=ComplexF64, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [30] eval(m::Module, e::Any) @ Core ./boot.jl:489 [31] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [33] top-level scope @ none:6 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489 [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [36] _start() @ Base ./client.jl:563 Dense ql 
decomposition, elt=ComplexF64, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql_positive(M::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:403 [12] ql_positive @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined] [13] qx @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [inlined] [14] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [18] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [20] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [21] eval(m::Module, e::Any) @ Core ./boot.jl:489 [22] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ 
~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ none:6 [35] eval(m::Module, e::Any) @ Core ./boot.jl:489 [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [37] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=ComplexF64, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql_positive(M::JLArrays.JLArray{ComplexF64, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:403 [12] ql_positive @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined] [13] qx @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [inlined] [14] ql(T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF64, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF64, JLArrays.JLArray{ComplexF64, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [18] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [20] top-level scope @ 
~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [21] eval(m::Module, e::Any) @ Core ./boot.jl:489 [22] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ none:6 [35] eval(m::Module, e::Any) @ Core ./boot.jl:489 [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [37] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=Float32, positve=false, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [30] eval(m::Module, e::Any) @ Core ./boot.jl:489 [31] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [33] top-level scope @ none:6 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489 [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [36] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=Float32, positve=false, 
singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [28] macro expansion @ 
/opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [30] eval(m::Module, e::Any) @ Core ./boot.jl:489 [31] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [33] top-level scope @ none:6 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489 [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [36] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=Float32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql_positive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:403 [12] ql_positive @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined] [13] qx @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [inlined] [14] ql(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [18] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [20] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [21] eval(m::Module, e::Any) @ Core ./boot.jl:489 [22] top-level scope @ 
~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ none:6 [35] eval(m::Module, e::Any) @ Core ./boot.jl:489 [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [37] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=Float32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql_positive(M::JLArrays.JLArray{Float32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:403 [12] ql_positive @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined] [13] qx @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [inlined] [14] ql(T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{Float32, 2, Tuple{Int64, Int64}, NDTensors.Dense{Float32, JLArrays.JLArray{Float32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [18] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [20] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [21] eval(m::Module, e::Any) @ Core ./boot.jl:489 [22] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ none:6 [35] eval(m::Module, e::Any) @ Core ./boot.jl:489 [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [37] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=ComplexF32, positve=false, 
singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] 
[28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [30] eval(m::Module, e::Any) @ Core ./boot.jl:489 [31] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [33] top-level scope @ none:6 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489 [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [36] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=ComplexF32, positve=false, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:16 [inlined] [12] qx(qx::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [13] ql(T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [14] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [15] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [16] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [17] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [18] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [19] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [20] eval(m::Module, e::Any) @ 
Core ./boot.jl:489 [21] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [22] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [23] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [24] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [25] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [26] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [27] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [28] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [29] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [30] eval(m::Module, e::Any) @ Core ./boot.jl:489 [31] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [32] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [33] top-level scope @ none:6 [34] eval(m::Module, e::Any) @ Core ./boot.jl:489 [35] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [36] _start() @ Base ./client.jl:563 Dense ql decomposition, elt=ComplexF32, positve=true, singular=false, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. 
Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql_positive(M::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:403 [12] ql_positive @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined] [13] qx @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [inlined] [14] ql(T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [18] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [20] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [21] eval(m::Module, e::Any) @ Core ./boot.jl:489 [22] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [29] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [30] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [31] eval(m::Module, e::Any) @ Core ./boot.jl:489 [32] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [33] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [34] top-level scope @ none:6 [35] eval(m::Module, e::Any) @ Core ./boot.jl:489 [36] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [37] _start() @ Base ./client.jl:563 Dense ql decomposition, 
elt=ComplexF32, positve=true, singular=true, device=jl: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 Got exception outside of a @test Scalar indexing is disallowed. Invocation of getindex resulted in scalar indexing of a GPU array. This is typically caused by calling an iterating implementation of a method. Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided. If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question. Stacktrace: [1] error(s::String) @ Base ./error.jl:44 [2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151 [3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124 [4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112 [5] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:50 [inlined] [6] scalar_getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:36 [inlined] [7] _getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:19 [inlined] [8] getindex @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:17 [inlined] [9] ql!(A::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:449 [10] ql @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:428 [inlined] [11] ql_positive(M::JLArrays.JLArray{ComplexF32, 2}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:403 [12] ql_positive @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/linearalgebra.jl:20 [inlined] [13] qx @ ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:344 [inlined] [14] ql(T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}; positive::Bool) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:336 [15] kwcall(::@NamedTuple{positive::Bool}, ::typeof(NDTensors.Expose.ql), T::NDTensors.DenseTensor{ComplexF32, 2, Tuple{Int64, Int64}, NDTensors.Dense{ComplexF32, JLArrays.JLArray{ComplexF32, 1}}}) @ NDTensors ~/.julia/packages/NDTensors/Lb78J/src/linearalgebra/linearalgebra.jl:334 [16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:60 [inlined] [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [18] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [inlined] [19] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [20] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:29 [21] eval(m::Module, e::Any) @ Core ./boot.jl:489 [22] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl:1 [23] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [24] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [28] macro expansion @ 
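[Editorial note: every one of these failures bottoms out in the same place: ql! at linearalgebra.jl:449 reads matrix elements one at a time, which GPUArraysCore forbids on device arrays by default. A host-fallback around the scalar-indexing kernel is one common way such code gets fixed; the sketch below is an illustration only, with LinearAlgebra's qr standing in for NDTensors' ql!, and ql_via_host being a hypothetical helper, not NDTensors API:

    using LinearAlgebra
    using JLArrays

    # Hypothetical fallback: copy the device matrix to host memory, factorize
    # there (scalar indexing is fine on a plain Array), copy the factors back.
    function ql_via_host(A::JLArray)
        F = qr(Array(A))  # NDTensors' scalar-indexing ql! would slot in here
        return JLArray(Matrix(F.Q)), JLArray(F.R)
    end

The round-trip costs two copies but keeps the default scalar-indexing guard intact for everything else.]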
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_tupletools.jl
Running /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl
NDTensors.BackendSelection.Algorithm type , NamedTuple()
Testing Expose jl, Float32: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:23
Got exception outside of a @test
Scalar indexing is disallowed.
Invocation of setindex! resulted in scalar indexing of a GPU array.
This is typically caused by calling an iterating implementation of a method.
Such implementations *do not* execute on the GPU, but very slowly on the CPU, and therefore should be avoided.
If you want to allow scalar iteration, use `allowscalar` or `@allowscalar` to enable scalar iteration globally or for the operations in question.
Stacktrace:
[1] error(s::String) @ Base ./error.jl:44
[2] errorscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:151
[3] _assertscalar(op::String, behavior::GPUArraysCore.ScalarIndexing) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:124
[4] assertscalar(op::String) @ GPUArraysCore ~/.julia/packages/GPUArraysCore/aNaXo/src/GPUArraysCore.jl:112
[5] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:58 [inlined]
[6] scalar_setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:42 [inlined]
[7] _setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:26 [inlined]
[8] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:24 [inlined]
[9] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{Float32, 1}, JLArrays.JLArray{Float32, 1}}, x::Int64) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:9
[10] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:62 [inlined]
[11] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
[12] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:2041
[13] eval(m::Module, e::Any) @ Core ./boot.jl:489
[14] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:1
[15] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
[16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl:12 [inlined]
[17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
[18] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl:2041
[19] eval(m::Module, e::Any) @ Core ./boot.jl:489
[20] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl:1
[21] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
[22] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4
[23] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
[24] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined]
[25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined]
[26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined]
[27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined]
[28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined]
[29] eval(m::Module, e::Any) @ Core ./boot.jl:489
[30] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28
[31] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309
[32] top-level scope @ none:6
[33] eval(m::Module, e::Any) @ Core ./boot.jl:489
[34] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296
[35] _start() @ Base ./client.jl:563
Testing Expose jl, ComplexF32: Error During Test at /home/pkgeval/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:23
Got exception outside of a @test
[Same scalar setindex! exception and stack trace as the Float32 case above, with Exposed{JLArrays.JLArray{ComplexF32, 1}, JLArrays.JLArray{ComplexF32, 1}}.]
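[Editorial note: the two Expose failures are the write-side version of the same restriction: setindex!(E::Exposed, x) at Expose/src/functions/abstractarray.jl:9 performs a scalar write into the wrapped JLArray. A minimal sketch of the disallowed write and two device-friendly alternatives, again assuming only the test environment's JLArrays and GPUArraysCore:

    using GPUArraysCore: @allowscalar
    using JLArrays

    v = JLArray(zeros(Float32, 3))
    # v[2] = 1f0              # throws: scalar setindex! is disallowed
    @allowscalar v[2] = 1f0   # explicit opt-in for a single scalar write
    v .= 2f0                  # broadcast assignment stays on-device and is always allowed

Broadcast (or fill!) is the usual fix when a whole array is being written, since it avoids the per-element host round-trips that the error message warns about.]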
@ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:26 [inlined] [8] setindex! @ ~/.julia/packages/GPUArrays/u6tui/src/host/indexing.jl:24 [inlined] [9] setindex!(E::NDTensors.Expose.Exposed{JLArrays.JLArray{ComplexF32, 1}, JLArrays.JLArray{ComplexF32, 1}}, x::Int64) @ NDTensors.Expose ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/src/functions/abstractarray.jl:9 [10] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:62 [inlined] [11] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [12] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:2041 [13] eval(m::Module, e::Any) @ Core ./boot.jl:489 [14] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/src/lib/Expose/test/runtests.jl:1 [15] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [16] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl:12 [inlined] [17] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [18] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl:2041 [19] eval(m::Module, e::Any) @ Core ./boot.jl:489 [20] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl:1 [21] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [22] top-level scope @ ~/.julia/packages/SafeTestsets/raUNr/src/SafeTestsets.jl:4 [23] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [24] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:7 [inlined] [25] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:1929 [inlined] [26] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:13 [inlined] [27] macro expansion @ /opt/julia/share/julia/stdlib/v1.13/Test/src/Test.jl:2018 [inlined] [28] macro expansion @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:15 [inlined] [29] eval(m::Module, e::Any) @ Core ./boot.jl:489 [30] top-level scope @ ~/.julia/packages/NDTensors/Lb78J/test/runtests.jl:28 [31] include(mapexpr::Function, mod::Module, _path::String) @ Base ./Base.jl:309 [32] top-level scope @ none:6 [33] eval(m::Module, e::Any) @ Core ./boot.jl:489 [34] exec_options(opts::Base.JLOptions) @ Base ./client.jl:296 [35] _start() @ Base ./client.jl:563 Test Summary: | Pass Error Total Time NDTensors | 1952 67 2019 17m47.9s /home/pkgeval/.julia/packages/NDTensors/Lb78J/test | 1952 67 2019 17m45.1s Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl | 575 12 587 4m14.0s BlockSparseTensor basic functionality | 575 12 587 2m35.0s test device: cpu, eltype: Float32 | 230 230 31.3s test device: cpu, eltype: Float64 | 230 230 15.5s test device: jl, eltype: Float32 | 10 1 11 3.8s test device: jl, eltype: Float64 | 10 1 11 0.8s BlockSparseTensor setindex! 
Test Summary: | Pass  Error  Total  Time
NDTensors | 1952 67 2019 17m47.9s
  /home/pkgeval/.julia/packages/NDTensors/Lb78J/test | 1952 67 2019 17m45.1s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_blocksparse.jl | 575 12 587 4m14.0s
      BlockSparseTensor basic functionality | 575 12 587 2m35.0s
        test device: cpu, eltype: Float32 | 230 230 31.3s
        test device: cpu, eltype: Float64 | 230 230 15.5s
        test device: jl, eltype: Float32 | 10 1 11 3.8s
        test device: jl, eltype: Float64 | 10 1 11 0.8s
      BlockSparseTensor setindex! add block | 79 79 0.1s
      svd on cpu, eltype: Float32 | 5 5 23.4s
      svd on cpu, eltype: Float64 | 5 5 19.5s
      svd on jl, eltype: Float32 | 5 5 17.0s
        svd example 1 | 1 1 16.0s
        svd example 2 | 1 1 0.1s
        svd example 3 | 1 1 0.1s
        svd example 4 | 1 1 0.7s
        svd example 5 | 1 1 0.1s
      svd on jl, eltype: Float64 | 5 5 15.6s
        svd example 1 | 1 1 14.7s
        svd example 2 | 1 1 0.1s
        svd example 3 | 1 1 0.1s
        svd example 4 | 1 1 0.7s
        svd example 5 | 1 1 0.1s
      exp, eltype: Float32 | 3 3 14.8s
      exp, eltype: Float64 | 3 3 12.9s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_combiner.jl | 96 4 100 1m24.1s
      CombinerTensor basic functionality | 96 4 100 1m23.9s
        test device: cpu, eltype: Float64 | 36 36 37.4s
        test device: cpu, eltype: Float32 | 36 36 18.0s
        test device: jl, eltype: Float64 | 12 2 14 15.8s
          Dense * Combiner | 12 12 6.0s
          BlockSparse * Combiner | 1 1 7.5s
          BlockSparse * Combiner | 1 1 2.4s
        test device: jl, eltype: Float32 | 12 2 14 12.4s
          Dense * Combiner | 12 12 3.6s
          BlockSparse * Combiner | 1 1 6.5s
          BlockSparse * Combiner | 1 1 2.3s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_dense.jl | 538 4 542 3m26.0s
      Dense Tensors | 538 4 542 3m25.5s
        test device: cpu | 260 260 27.5s
        test device: jl | 255 3 258 1m06.8s
          DenseTensor basic functionality | 116 1 117 27.4s
          Random constructor | 8 8 6.3s
          Complex Valued Tensors | 72 72 2.4s
          Custom inds types | 21 21 1.1s
          generic contraction | 36 36 0.0s
          Contraction with size 1 block and NaN | 2 2 4 29.7s
            No permutation | 1 1 2 29.6s
            Permutation | 1 1 2 0.1s
          Contract with exotic types | 14 14 17.3s
          change backends | 6 6 1.5s
        change backends | 3 1 4 1m32.0s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diag.jl | 254 12 266 5m33.7s
      DiagTensor basic functionality | 240 8 248 5m01.2s
        test device: cpu | 31 31 24.8s
        test device: cpu | 31 31 23.0s
        test device: cpu | 31 31 19.9s
        test device: cpu | 31 31 19.3s
        test device: jl | 29 2 31 55.7s
        test device: jl | 29 2 31 1m07.5s
        test device: jl | 29 2 31 53.7s
        test device: jl | 29 2 31 37.1s
      DiagTensor contractions | 9 9 10.5s
      DiagTensor contractions | 5 4 9 20.5s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_diagblocksparse.jl | 28 1 29 1m18.8s
      UniformDiagBlockSparseTensor basic functionality | 10 10 1.1s
      DiagBlockSparse off-diagonal (eltype=Float32) | 2 2 12.9s
      DiagBlockSparse off-diagonal (eltype=Float64) | 2 2 10.0s
      DiagBlockSparse off-diagonal (eltype=ComplexF32) | 2 2 13.6s
      DiagBlockSparse off-diagonal (eltype=ComplexF64) | 2 2 13.4s
      DiagBlockSparse contract | 6 6 9.1s
      DiagBlockSparse contract | 1 1 5.9s
      UniformDiagBlockSparse norm | 2 2 0.0s
      DiagBlockSparse denseblocks | 2 2 0.3s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_emptynumber.jl | 28 28 0.6s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_emptystorage.jl | 24 24 2.5s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_linearalgebra.jl | 230 32 262 57.4s
      random_orthog | 3 3 1.6s
      random_unitary | 3 3 1.7s
      QX testing | 224 32 256 51.9s
        Dense qr decomposition, elt=Float64, positve=false, singular=false, device=cpu | 5 5 1.4s
        Dense qr decomposition, elt=Float64, positve=false, singular=false, device=jl | 1 1 7.2s
        Dense qr decomposition, elt=Float64, positve=false, singular=true, device=cpu | 5 5 0.4s
        Dense qr decomposition, elt=Float64, positve=false, singular=true, device=jl | 1 1 0.8s
        Dense qr decomposition, elt=Float64, positve=true, singular=false, device=cpu | 9 9 2.0s
        Dense qr decomposition, elt=Float64, positve=true, singular=false, device=jl | 1 1 0.1s
        Dense qr decomposition, elt=Float64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=Float64, positve=true, singular=true, device=jl | 1 1 0.1s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=false, device=cpu | 5 5 2.0s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=false, device=jl | 1 1 4.2s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=true, device=cpu | 5 5 0.4s
        Dense qr decomposition, elt=ComplexF64, positve=false, singular=true, device=jl | 1 1 0.7s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=false, device=cpu | 9 9 1.2s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=false, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=ComplexF64, positve=true, singular=true, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=Float32, positve=false, singular=false, device=cpu | 5 5 2.4s
        Dense qr decomposition, elt=Float32, positve=false, singular=false, device=jl | 1 1 1.6s
        Dense qr decomposition, elt=Float32, positve=false, singular=true, device=cpu | 5 5 0.4s
        Dense qr decomposition, elt=Float32, positve=false, singular=true, device=jl | 1 1 0.7s
        Dense qr decomposition, elt=Float32, positve=true, singular=false, device=cpu | 9 9 1.8s
        Dense qr decomposition, elt=Float32, positve=true, singular=false, device=jl | 1 1 0.1s
        Dense qr decomposition, elt=Float32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=Float32, positve=true, singular=true, device=jl | 1 1 0.1s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=false, device=cpu | 5 5 3.0s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=false, device=jl | 1 1 2.9s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=true, device=cpu | 5 5 0.4s
        Dense qr decomposition, elt=ComplexF32, positve=false, singular=true, device=jl | 1 1 0.8s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=false, device=cpu | 9 9 1.5s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=false, device=jl | 1 1 0.2s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense qr decomposition, elt=ComplexF32, positve=true, singular=true, device=jl | 1 1 0.2s
        Dense ql decomposition, elt=Float64, positve=false, singular=false, device=cpu | 5 5 1.1s
        Dense ql decomposition, elt=Float64, positve=false, singular=false, device=jl | 1 1 2.4s
        Dense ql decomposition, elt=Float64, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=Float64, positve=false, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float64, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float64, positve=true, singular=false, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float64, positve=true, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=false, device=cpu | 5 5 1.1s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=false, device=jl | 1 1 2.4s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=false, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=false, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF64, positve=true, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float32, positve=false, singular=false, device=cpu | 5 5 1.1s
        Dense ql decomposition, elt=Float32, positve=false, singular=false, device=jl | 1 1 2.3s
        Dense ql decomposition, elt=Float32, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=Float32, positve=false, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=false, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=Float32, positve=true, singular=true, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=false, device=cpu | 5 5 1.2s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=false, device=jl | 1 1 2.5s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=true, device=cpu | 5 5 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=false, singular=true, device=jl | 1 1 0.1s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=false, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=false, device=jl | 1 1 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=true, device=cpu | 9 9 0.0s
        Dense ql decomposition, elt=ComplexF32, positve=true, singular=true, device=jl | 1 1 0.1s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/test_tupletools.jl | 7 7 0.2s
    Test /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/lib/runtests.jl | 172 2 174 47.7s
      Test NDTensors lib AMDGPUExtensions | 2 2 0.1s
      Test NDTensors lib BackendSelection | 12 12 0.8s
      Test NDTensors lib CUDAExtensions | 2 2 0.0s
      Test NDTensors lib GPUArraysCoreExtensions | 1 1 0.0s
      Test NDTensors lib MetalExtensions | 1 1 0.0s
      Test NDTensors lib Expose | 154 2 156 46.5s
        Testing Expose cpu, Float32 | 64 64 19.4s
        Testing Expose cpu, ComplexF32 | 64 64 15.1s
        Testing Expose jl, Float32 | 13 1 14 3.1s
        Testing Expose jl, ComplexF32 | 13 1 14 2.3s
RNG of the outermost testset: Random.Xoshiro(0xf9a4fa247803cffd, 0x4cb907c29a66e6e0, 0x7994ccb83a145381, 0xe9f407684a74fc36, 0xa56444df72d10b9c)
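The test harness prints the outermost testset's RNG state precisely so that failures involving random data can be replayed. A sketch of restoring that state before re-running the suite (assuming a Julia version, like the one here, whose five-word Xoshiro printout round-trips through the constructor):

    using Random

    # Restore the task-local RNG to the state the test harness reported.
    rng = Random.Xoshiro(0xf9a4fa247803cffd, 0x4cb907c29a66e6e0,
                         0x7994ccb83a145381, 0xe9f407684a74fc36,
                         0xa56444df72d10b9c)
    copy!(Random.default_rng(), rng)

    # include("test/runtests.jl")   # then re-run from the package directory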
ERROR: LoadError: Some tests did not pass: 1952 passed, 0 failed, 67 errored, 0 broken.
in expression starting at /home/pkgeval/.julia/packages/NDTensors/Lb78J/test/runtests.jl:3
Testing failed after 1082.09s
ERROR: LoadError: Package NDTensors errored during testing
Stacktrace:
  [1] pkgerror(msg::String)
    @ Pkg.Types /opt/julia/share/julia/stdlib/v1.13/Pkg/src/Types.jl:68
  [2] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, julia_args::Cmd, test_args::Cmd, test_fn::Nothing, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool)
    @ Pkg.Operations /opt/julia/share/julia/stdlib/v1.13/Pkg/src/Operations.jl:2673
  [3] test
    @ /opt/julia/share/julia/stdlib/v1.13/Pkg/src/Operations.jl:2522 [inlined]
  [4] test(ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec}; coverage::Bool, test_fn::Nothing, julia_args::Cmd, test_args::Cmd, force_latest_compatible_version::Bool, allow_earlier_backwards_compatible_versions::Bool, allow_reresolve::Bool, kwargs::@Kwargs{io::IOContext{IO}})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:538
  [5] kwcall(::@NamedTuple{julia_args::Cmd, io::IOContext{IO}}, ::typeof(Pkg.API.test), ctx::Pkg.Types.Context, pkgs::Vector{PackageSpec})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:515
  [6] test(pkgs::Vector{PackageSpec}; io::IOContext{IO}, kwargs::@Kwargs{julia_args::Cmd})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:168
  [7] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkgs::Vector{PackageSpec})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:157
  [8] test(pkgs::Vector{String}; kwargs::@Kwargs{julia_args::Cmd})
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:156
  [9] test
    @ /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:156 [inlined]
 [10] kwcall(::@NamedTuple{julia_args::Cmd}, ::typeof(Pkg.API.test), pkg::String)
    @ Pkg.API /opt/julia/share/julia/stdlib/v1.13/Pkg/src/API.jl:155
 [11] top-level scope
    @ /PkgEval.jl/scripts/evaluate.jl:219
 [12] include(mod::Module, _path::String)
    @ Base ./Base.jl:308
 [13] exec_options(opts::Base.JLOptions)
    @ Base ./client.jl:330
 [14] _start()
    @ Base ./client.jl:563
in expression starting at /PkgEval.jl/scripts/evaluate.jl:210
PkgEval failed after 1323.23s: package fails to precompile
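Since the failure surfaces through the standard Pkg test machinery (frames [2] through [10] above), it can be reproduced outside the PkgEval sandbox with a plain Pkg.test call. A sketch, assuming the same package version resolves locally (any extra julia_args the evaluate script passes are not shown in this log):

    using Pkg

    Pkg.add(name = "NDTensors", version = "0.4.9")  # the version under evaluation
    Pkg.test("NDTensors")                           # runs test/runtests.jl in a temporary project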