Package evaluation of InvertibleNetworks on Julia 1.10.9 (96dc2d8c45*) started at 2025-06-06T23:02:38.116 ################################################################################ # Set-up # Installing PkgEval dependencies (TestEnv)... Set-up completed after 5.19s ################################################################################ # Installation # Installing InvertibleNetworks... Resolving package versions... Installed Conda ── v1.10.2 Installed PyCall ─ v1.96.4 Updating `~/.julia/environments/v1.10/Project.toml` [b7115f24] + InvertibleNetworks v2.3.0 Updating `~/.julia/environments/v1.10/Manifest.toml` [621f4979] + AbstractFFTs v1.5.0 [7f219486] + AbstractNFFTs v0.8.2 [7d9f7c33] + Accessors v0.1.42 [79e6a3ab] + Adapt v4.3.0 [66dad0bd] + AliasTables v1.1.3 [dce04be8] + ArgCheck v2.5.0 [a9b6321e] + Atomix v1.1.1 [ab4f0b2a] + BFloat16s v0.5.1 [198e06fe] + BangBang v0.4.4 [9718e550] + Baselet v0.1.1 [26cce99e] + BasicInterpolators v0.7.1 [fa961155] + CEnum v0.5.0 [052768ef] + CUDA v5.8.2 [1af6417a] + CUDA_Runtime_Discovery v0.3.5 [082447d4] + ChainRules v1.72.4 [d360d2e6] + ChainRulesCore v1.25.1 [3da002f7] + ColorTypes v0.12.1 [5ae59095] + Colors v0.13.1 [bbf7d656] + CommonSubexpressions v0.3.1 [34da2185] + Compat v4.16.0 [a33af91c] + CompositionsBase v0.1.2 [8f4d0f93] + Conda v1.10.2 [187b0558] + ConstructionBase v1.5.8 [6add18c4] + ContextVariablesX v0.1.3 [a8cc5b0e] + Crayons v4.1.1 ⌅ [717857b8] + DSP v0.7.10 [9a962f9c] + DataAPI v1.16.0 [a93c6f00] + DataFrames v1.7.0 [864edb3b] + DataStructures v0.18.22 [e2d170a0] + DataValueInterfaces v1.0.0 [244e2a9f] + DefineSingletons v0.1.2 [8bb1440f] + DelimitedFiles v1.9.1 [163ba53b] + DiffResults v1.1.0 [b552c78f] + DiffRules v1.15.1 [aaf54ef3] + DistributedArrays v0.6.7 [ffbed154] + DocStringExtensions v0.9.4 [e2ba6199] + ExprTools v0.1.10 [7a1cc6ca] + FFTW v1.9.0 [cc61a311] + FLoops v0.2.2 [b9860ae5] + FLoopsBase v0.1.1 [1a297f60] + FillArrays v1.13.0 [53c48c17] + FixedPointNumbers v0.8.5 ⌅ [587475ba] + Flux v0.14.25 [f6369f11] + ForwardDiff v1.0.1 ⌅ [d9f16b24] + Functors v0.4.12 [0c68f7d7] + GPUArrays v11.2.2 [46192b85] + GPUArraysCore v0.2.0 [61eb1bfa] + GPUCompiler v1.5.1 [096a3bc2] + GPUToolbox v0.2.0 [076d061b] + HashArrayMappedTries v0.2.0 [7869d1d1] + IRTools v0.4.14 [22cec73e] + InitialValues v0.3.1 [842dd82b] + InlineStrings v1.4.3 [505f98c9] + InplaceOps v0.3.0 [18e54dd8] + IntegerMathUtils v0.1.2 [3587e190] + InverseFunctions v0.1.17 [41ab1584] + InvertedIndices v1.3.1 [b7115f24] + InvertibleNetworks v2.3.0 [92d709cd] + IrrationalConstants v0.2.4 [c8e1da08] + IterTools v1.10.0 [42fd0dbc] + IterativeSolvers v0.9.4 [82899510] + IteratorInterfaceExtensions v1.0.0 [692b3bcd] + JLLWrappers v1.7.0 ⌅ [bb331ad6] + JOLI v0.8.5 [682c06a0] + JSON v0.21.4 [b14d175d] + JuliaVariables v0.2.4 [63c18a36] + KernelAbstractions v0.9.34 [929cbde3] + LLVM v9.4.0 [8b046642] + LLVMLoopInfo v1.0.0 [b964fa9f] + LaTeXStrings v1.4.0 [2ab3a3ac] + LogExpFunctions v0.3.29 [c2834f40] + MLCore v1.0.0 ⌃ [7e8f7934] + MLDataDevices v1.5.3 [d8e11817] + MLStyle v0.4.17 [f1d291b0] + MLUtils v0.4.8 [1914dd2f] + MacroTools v0.5.16 [c03570c3] + Memoize v0.4.4 [128add7d] + MicroCollections v0.2.0 [e1d29d7a] + Missings v1.2.0 [efe261a4] + NFFT v0.13.7 [872c559c] + NNlib v0.9.30 [5da4648a] + NVTX v1.0.0 [77ba4419] + NaNMath v1.1.3 [71a1bf82] + NameResolution v0.1.5 [4d1e1d77] + Nullables v1.0.0 [0b1bfda6] + OneHotArrays v0.2.10 ⌅ [3bd65402] + Optimisers v0.3.4 [bac558e1] + OrderedCollections v1.8.1 [69de0a69] + Parsers v2.8.3 [f27b6e38] + Polynomials 
v4.0.19 [2dfb63ee] + PooledArrays v1.4.3 ⌅ [aea7be01] + PrecompileTools v1.2.1 [21216c6a] + Preferences v1.4.3 [8162dcfd] + PrettyPrint v0.2.0 [08abe8d2] + PrettyTables v2.4.0 [27ebfcd6] + Primes v0.5.7 [33c8b6b6] + ProgressLogging v0.1.4 [43287f4e] + PtrArrays v1.3.0 [438e738f] + PyCall v1.96.4 [74087812] + Random123 v1.7.1 [e6cf234a] + RandomNumbers v1.6.0 [c1ae055f] + RealDot v0.1.0 [3cdcf5f2] + RecipesBase v1.3.4 [189a3867] + Reexport v1.2.2 [ae029012] + Requires v1.3.1 [7e506255] + ScopedValues v1.3.0 [6c6a2e73] + Scratch v1.2.1 [91c51154] + SentinelArrays v1.4.8 [efcf1570] + Setfield v1.1.2 [605ecd9f] + ShowCases v0.1.0 [699a6c99] + SimpleTraits v0.9.4 [a2af1166] + SortingAlgorithms v1.2.1 [dc90abb0] + SparseInverseSubset v0.1.2 [276daf66] + SpecialFunctions v2.5.1 [171d559e] + SplittablesBase v0.1.15 [90137ffa] + StaticArrays v1.9.13 [1e83bf80] + StaticArraysCore v1.4.3 [82ae8749] + StatsAPI v1.7.1 [2913bbd2] + StatsBase v0.34.5 [892a3eda] + StringManipulation v0.4.1 [09ab397b] + StructArrays v0.7.1 [3783bdb8] + TableTraits v1.0.1 [bd369af6] + Tables v1.12.1 [e689c965] + Tracy v0.1.4 [28d57a85] + Transducers v0.4.84 [013be700] + UnsafeAtomics v0.3.0 [81def892] + VersionParsing v1.3.0 ⌅ [29a6e085] + Wavelets v0.9.5 ⌅ [e88e6eb3] + Zygote v0.6.77 [700de1a5] + ZygoteRules v0.2.7 [02a925ec] + cuDNN v1.4.3 [4ee394cb] + CUDA_Driver_jll v0.13.0+0 [76a88914] + CUDA_Runtime_jll v0.17.0+0 [62b44479] + CUDNN_jll v9.10.0+0 [f5851436] + FFTW_jll v3.3.11+0 [1d5cc7b8] + IntelOpenMP_jll v2025.0.4+0 [9c1d0b0a] + JuliaNVTXCallbacks_jll v0.2.1+0 [dad2f222] + LLVMExtra_jll v0.0.36+0 [ad6e5548] + LibTracyClient_jll v0.9.1+6 [856f044c] + MKL_jll v2025.0.1+1 [e98f9f5b] + NVTX_jll v3.1.1+0 [efe28fd5] + OpenSpecFun_jll v0.5.6+0 [1e29f10c] + demumble_jll v1.3.0+0 [1317d2d5] + oneTBB_jll v2022.0.0+0 [0dad84c5] + ArgTools v1.1.1 [56f22d72] + Artifacts [2a0f44e3] + Base64 [ade2ca70] + Dates [8ba89e20] + Distributed [f43a241f] + Downloads v1.6.0 [7b1f6079] + FileWatching [9fa8497b] + Future [b77e0a4c] + InteractiveUtils [4af54fe1] + LazyArtifacts [b27032c2] + LibCURL v0.6.4 [76f85450] + LibGit2 [8f399da3] + Libdl [37e2e46d] + LinearAlgebra [56ddb016] + Logging [d6f4376e] + Markdown [a63ad114] + Mmap [ca575930] + NetworkOptions v1.2.0 [44cfe95a] + Pkg v1.10.0 [de0858da] + Printf [3fa0cd96] + REPL [9a3f8284] + Random [ea8e919c] + SHA v0.7.0 [9e88b42a] + Serialization [1a1011a3] + SharedArrays [6462fe0b] + Sockets [2f01184e] + SparseArrays v1.10.0 [10745b16] + Statistics v1.10.0 [4607b0f0] + SuiteSparse [fa267f1f] + TOML v1.0.3 [a4e569a6] + Tar v1.10.0 [8dfed614] + Test [cf7118a7] + UUIDs [4ec0a83e] + Unicode [e66e0078] + CompilerSupportLibraries_jll v1.1.1+0 [deac9b47] + LibCURL_jll v8.4.0+0 [e37daf67] + LibGit2_jll v1.6.4+0 [29816b5a] + LibSSH2_jll v1.11.0+1 [c8ffd9c3] + MbedTLS_jll v2.28.2+1 [14a3606d] + MozillaCACerts_jll v2023.1.10 [4536629a] + OpenBLAS_jll v0.3.23+4 [05823500] + OpenLibm_jll v0.8.5+0 [bea87d4a] + SuiteSparse_jll v7.2.1+1 [83775a58] + Zlib_jll v1.2.13+1 [8e850b90] + libblastrampoline_jll v5.11.0+0 [8e850ede] + nghttp2_jll v1.52.0+1 [3f19e933] + p7zip_jll v17.4.0+2 Info Packages marked with ⌃ and ⌅ have new versions available. Those with ⌃ may be upgradable, but those with ⌅ are restricted by compatibility constraints from upgrading. 
To see why use `status --outdated -m` Building Conda ─→ `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/b19db3927f0db4151cb86d073689f2428e524576/build.log` Building PyCall → `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/9816a3826b0ebf49ab4926e2b18842ad8b5c8f04/build.log` Installation completed after 72.97s ################################################################################ # Precompilation # Precompiling PkgEval dependencies... Precompiling package dependencies... Precompilation completed after 710.45s ################################################################################ # Testing # Testing InvertibleNetworks Status `/tmp/jl_8X1Yr4/Project.toml` [052768ef] CUDA v5.8.2 [d360d2e6] ChainRulesCore v1.25.1 ⌅ [587475ba] Flux v0.14.25 [b7115f24] InvertibleNetworks v2.3.0 ⌅ [bb331ad6] JOLI v0.8.5 [872c559c] NNlib v0.9.30 [a759f4b9] TimerOutputs v0.5.29 ⌅ [29a6e085] Wavelets v0.9.5 [02a925ec] cuDNN v1.4.3 [37e2e46d] LinearAlgebra [9a3f8284] Random [10745b16] Statistics v1.10.0 [8dfed614] Test Status `/tmp/jl_8X1Yr4/Manifest.toml` [621f4979] AbstractFFTs v1.5.0 [7f219486] AbstractNFFTs v0.8.2 [7d9f7c33] Accessors v0.1.42 [79e6a3ab] Adapt v4.3.0 [66dad0bd] AliasTables v1.1.3 [dce04be8] ArgCheck v2.5.0 [a9b6321e] Atomix v1.1.1 [ab4f0b2a] BFloat16s v0.5.1 [198e06fe] BangBang v0.4.4 [9718e550] Baselet v0.1.1 [26cce99e] BasicInterpolators v0.7.1 [fa961155] CEnum v0.5.0 [052768ef] CUDA v5.8.2 [1af6417a] CUDA_Runtime_Discovery v0.3.5 [082447d4] ChainRules v1.72.4 [d360d2e6] ChainRulesCore v1.25.1 [3da002f7] ColorTypes v0.12.1 [5ae59095] Colors v0.13.1 [bbf7d656] CommonSubexpressions v0.3.1 [34da2185] Compat v4.16.0 [a33af91c] CompositionsBase v0.1.2 [8f4d0f93] Conda v1.10.2 [187b0558] ConstructionBase v1.5.8 [6add18c4] ContextVariablesX v0.1.3 [a8cc5b0e] Crayons v4.1.1 ⌅ [717857b8] DSP v0.7.10 [9a962f9c] DataAPI v1.16.0 [a93c6f00] DataFrames v1.7.0 [864edb3b] DataStructures v0.18.22 [e2d170a0] DataValueInterfaces v1.0.0 [244e2a9f] DefineSingletons v0.1.2 [8bb1440f] DelimitedFiles v1.9.1 [163ba53b] DiffResults v1.1.0 [b552c78f] DiffRules v1.15.1 [aaf54ef3] DistributedArrays v0.6.7 [ffbed154] DocStringExtensions v0.9.4 [e2ba6199] ExprTools v0.1.10 [7a1cc6ca] FFTW v1.9.0 [cc61a311] FLoops v0.2.2 [b9860ae5] FLoopsBase v0.1.1 [1a297f60] FillArrays v1.13.0 [53c48c17] FixedPointNumbers v0.8.5 ⌅ [587475ba] Flux v0.14.25 [f6369f11] ForwardDiff v1.0.1 ⌅ [d9f16b24] Functors v0.4.12 [0c68f7d7] GPUArrays v11.2.2 [46192b85] GPUArraysCore v0.2.0 [61eb1bfa] GPUCompiler v1.5.1 [096a3bc2] GPUToolbox v0.2.0 [076d061b] HashArrayMappedTries v0.2.0 [7869d1d1] IRTools v0.4.14 [22cec73e] InitialValues v0.3.1 [842dd82b] InlineStrings v1.4.3 [505f98c9] InplaceOps v0.3.0 [18e54dd8] IntegerMathUtils v0.1.2 [3587e190] InverseFunctions v0.1.17 [41ab1584] InvertedIndices v1.3.1 [b7115f24] InvertibleNetworks v2.3.0 [92d709cd] IrrationalConstants v0.2.4 [c8e1da08] IterTools v1.10.0 [42fd0dbc] IterativeSolvers v0.9.4 [82899510] IteratorInterfaceExtensions v1.0.0 [692b3bcd] JLLWrappers v1.7.0 ⌅ [bb331ad6] JOLI v0.8.5 [682c06a0] JSON v0.21.4 [b14d175d] JuliaVariables v0.2.4 [63c18a36] KernelAbstractions v0.9.34 [929cbde3] LLVM v9.4.0 [8b046642] LLVMLoopInfo v1.0.0 [b964fa9f] LaTeXStrings v1.4.0 [2ab3a3ac] LogExpFunctions v0.3.29 [c2834f40] MLCore v1.0.0 ⌃ [7e8f7934] MLDataDevices v1.5.3 [d8e11817] MLStyle v0.4.17 [f1d291b0] MLUtils v0.4.8 [1914dd2f] MacroTools v0.5.16 [c03570c3] Memoize v0.4.4 [128add7d] MicroCollections v0.2.0 [e1d29d7a] Missings v1.2.0 [efe261a4] NFFT v0.13.7 [872c559c] 
NNlib v0.9.30 [5da4648a] NVTX v1.0.0 [77ba4419] NaNMath v1.1.3 [71a1bf82] NameResolution v0.1.5 [4d1e1d77] Nullables v1.0.0 [0b1bfda6] OneHotArrays v0.2.10 ⌅ [3bd65402] Optimisers v0.3.4 [bac558e1] OrderedCollections v1.8.1 [69de0a69] Parsers v2.8.3 [f27b6e38] Polynomials v4.0.19 [2dfb63ee] PooledArrays v1.4.3 ⌅ [aea7be01] PrecompileTools v1.2.1 [21216c6a] Preferences v1.4.3 [8162dcfd] PrettyPrint v0.2.0 [08abe8d2] PrettyTables v2.4.0 [27ebfcd6] Primes v0.5.7 [33c8b6b6] ProgressLogging v0.1.4 [43287f4e] PtrArrays v1.3.0 [438e738f] PyCall v1.96.4 [74087812] Random123 v1.7.1 [e6cf234a] RandomNumbers v1.6.0 [c1ae055f] RealDot v0.1.0 [3cdcf5f2] RecipesBase v1.3.4 [189a3867] Reexport v1.2.2 [ae029012] Requires v1.3.1 [7e506255] ScopedValues v1.3.0 [6c6a2e73] Scratch v1.2.1 [91c51154] SentinelArrays v1.4.8 [efcf1570] Setfield v1.1.2 [605ecd9f] ShowCases v0.1.0 [699a6c99] SimpleTraits v0.9.4 [a2af1166] SortingAlgorithms v1.2.1 [dc90abb0] SparseInverseSubset v0.1.2 [276daf66] SpecialFunctions v2.5.1 [171d559e] SplittablesBase v0.1.15 [90137ffa] StaticArrays v1.9.13 [1e83bf80] StaticArraysCore v1.4.3 [82ae8749] StatsAPI v1.7.1 [2913bbd2] StatsBase v0.34.5 [892a3eda] StringManipulation v0.4.1 [09ab397b] StructArrays v0.7.1 [3783bdb8] TableTraits v1.0.1 [bd369af6] Tables v1.12.1 [a759f4b9] TimerOutputs v0.5.29 [e689c965] Tracy v0.1.4 [28d57a85] Transducers v0.4.84 [013be700] UnsafeAtomics v0.3.0 [81def892] VersionParsing v1.3.0 ⌅ [29a6e085] Wavelets v0.9.5 ⌅ [e88e6eb3] Zygote v0.6.77 [700de1a5] ZygoteRules v0.2.7 [02a925ec] cuDNN v1.4.3 [4ee394cb] CUDA_Driver_jll v0.13.0+0 [76a88914] CUDA_Runtime_jll v0.17.0+0 [62b44479] CUDNN_jll v9.10.0+0 [f5851436] FFTW_jll v3.3.11+0 [1d5cc7b8] IntelOpenMP_jll v2025.0.4+0 [9c1d0b0a] JuliaNVTXCallbacks_jll v0.2.1+0 [dad2f222] LLVMExtra_jll v0.0.36+0 [ad6e5548] LibTracyClient_jll v0.9.1+6 [856f044c] MKL_jll v2025.0.1+1 [e98f9f5b] NVTX_jll v3.1.1+0 [efe28fd5] OpenSpecFun_jll v0.5.6+0 [1e29f10c] demumble_jll v1.3.0+0 [1317d2d5] oneTBB_jll v2022.0.0+0 [0dad84c5] ArgTools v1.1.1 [56f22d72] Artifacts [2a0f44e3] Base64 [ade2ca70] Dates [8ba89e20] Distributed [f43a241f] Downloads v1.6.0 [7b1f6079] FileWatching [9fa8497b] Future [b77e0a4c] InteractiveUtils [4af54fe1] LazyArtifacts [b27032c2] LibCURL v0.6.4 [76f85450] LibGit2 [8f399da3] Libdl [37e2e46d] LinearAlgebra [56ddb016] Logging [d6f4376e] Markdown [a63ad114] Mmap [ca575930] NetworkOptions v1.2.0 [44cfe95a] Pkg v1.10.0 [de0858da] Printf [3fa0cd96] REPL [9a3f8284] Random [ea8e919c] SHA v0.7.0 [9e88b42a] Serialization [1a1011a3] SharedArrays [6462fe0b] Sockets [2f01184e] SparseArrays v1.10.0 [10745b16] Statistics v1.10.0 [4607b0f0] SuiteSparse [fa267f1f] TOML v1.0.3 [a4e569a6] Tar v1.10.0 [8dfed614] Test [cf7118a7] UUIDs [4ec0a83e] Unicode [e66e0078] CompilerSupportLibraries_jll v1.1.1+0 [deac9b47] LibCURL_jll v8.4.0+0 [e37daf67] LibGit2_jll v1.6.4+0 [29816b5a] LibSSH2_jll v1.11.0+1 [c8ffd9c3] MbedTLS_jll v2.28.2+1 [14a3606d] MozillaCACerts_jll v2023.1.10 [4536629a] OpenBLAS_jll v0.3.23+4 [05823500] OpenLibm_jll v0.8.5+0 [bea87d4a] SuiteSparse_jll v7.2.1+1 [83775a58] Zlib_jll v1.2.13+1 [8e850b90] libblastrampoline_jll v5.11.0+0 [8e850ede] nghttp2_jll v1.52.0+1 [3f19e933] p7zip_jll v17.4.0+2 Info Packages marked with ⌃ and ⌅ have new versions available. Those with ⌃ may be upgradable, but those with ⌅ are restricted by compatibility constraints from upgrading. Testing Running tests... 
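Each `Gradient test ...` block that follows prints two error columns per trial while an internal step size is halved. The roughly 2x decay of the first column and roughly 4x decay of the second are consistent with the usual Taylor-series gradient test; a minimal, self-contained Julia sketch of that convention (illustrative least-squares objective, not the package's own test utilities):

    using LinearAlgebra, Random

    # Illustrative objective and its analytic gradient (not the package's code).
    Random.seed!(1)
    A, y = randn(Float32, 10, 5), randn(Float32, 10)
    J(x)  = 0.5f0 * norm(A*x - y)^2
    gJ(x) = A' * (A*x - y)

    x0, dx = randn(Float32, 5), randn(Float32, 5)
    f0, g0 = J(x0), gJ(x0)

    h = 0.1f0
    for _ in 1:6
        err1 = abs(J(x0 + h*dx) - f0)                  # O(h): roughly halves each row
        err2 = abs(J(x0 + h*dx) - f0 - h*dot(g0, dx))  # O(h^2): roughly quarters each row
        println(err1, "; ", err2)
        global h = h / 2                               # halve the step, as in the log
    end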
Gradient test mse loss 216.98633; 22.839432 114.20288; 5.709999 58.52881; 1.4276314 29.621094; 0.35712624 14.899902; 0.08920765 7.472412; 0.022142887 Gradient test log likelihood 49.979248; 6.120369 26.519775; 1.5300331 13.642456; 0.3824482 6.916931; 0.09552097 3.4822388; 0.023987293 1.7471924; 0.0059206486 Gradient test sequential network: input 202.74756; 93.41455 124.76465; 23.316406 68.19653; 5.843994 35.529053; 1.4912109 18.133057; 0.3770752 Gradient test sequential network: parameters 75.58472; 79.49957 21.843506; 23.800934 5.517334; 6.4960475 1.1904297; 1.6797864 0.18359375; 0.42827216 Jacobian test 120.2537; 22.813396 58.558975; 7.012685 28.92853; 2.2621448 14.3773575; 0.7489061 7.1690745; 0.24536462 Gradient test convolutions 0.002793163; 1.4018966e-5 0.0014001131; 3.4779077e-6 0.0007009357; 8.5978536e-7 0.00035069883; 1.9892468e-7 0.00017541647; 3.2407115e-8 8.7723136e-5; 1.3023964e-9 Gradient test convolutions 8.263428; 0.04150963 4.1421204; 0.01034832 2.0736694; 0.002564907 1.0375061; 0.0006110668 0.5189514; 0.00010716915 0.25952148; 7.8082085e-6 WARNING: Method definition objective(Any, Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_nnlib_convolution.jl:69 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_nnlib_convolution.jl:136. Gradient test convolutions 0.00072783604; 3.6484562e-6 0.00036483258; 9.096693e-7 0.00018264726; 2.2386666e-7 9.138137e-5; 5.4191332e-8 4.5713037e-5; 4.7439244e-9 2.2858381e-5; 5.0931703e-10 Gradient test convolutions 168.90039; 0.8508911 84.66113; 0.21450806 42.38379; 0.054031372 21.205078; 0.013832092 10.604492; 0.004962921 5.3027344; 0.0019931793 Gradient test leaky ReLU 0.13728374; 0.010124326 0.07105678; 0.002647251 0.036165893; 0.00068612397 0.018257618; 0.00016839057 0.009168804; 4.420057e-5 WARNING: Method definition objective(Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:27 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:69. Gradient test ReLU 0.13385373; 0.010811284 0.06958032; 0.0027521923 0.03546393; 0.00070232525 0.017901242; 0.00018188544 0.008997142; 4.4421293e-5 WARNING: Method definition objective(Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:69 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:116. Gradient test Sigmoid 0.0317145; 0.0016641803 0.016273364; 0.00041597523 0.00824067; 0.00010399986 0.0041463524; 2.5982503e-5 0.0020796806; 6.4868946e-6 WARNING: Method definition objective(Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:116 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:163. Gradient test scaled and shifted Sigmoid 0.007743612; 0.00040639378 0.0039734095; 0.00010159332 0.0020121; 2.5401358e-5 0.0010123849; 6.3658226e-6 0.0005077757; 1.5996629e-6 WARNING: Method definition objective(Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:163 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:206. 
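The `WARNING: Method definition objective(Any, Any) ... overwritten` messages (and the later ones for `loss` and `mean`) appear because several test files are `include`d into `Main` and each defines a helper with the same name and signature, so later definitions replace earlier ones; the tests themselves still pass. A small sketch of the pattern and one conventional way to avoid it, with illustrative module names:

    # Two hypothetical test files both define objective(x, y) at top level, so
    #   include("test_a.jl"); include("test_b.jl")
    # in the same session prints a "Method definition ... overwritten" warning.
    # Giving each file its own module keeps the helpers separate:
    module TestA
        objective(x, y) = sum(abs2, x .- y)   # first helper
    end

    module TestB
        objective(x, y) = sum(abs, x .- y)    # same name, different namespace: no warning
    end

    @assert TestA.objective([1.0], [0.0]) == 1.0
    @assert TestB.objective([2.0], [0.0]) == 2.0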
Gradient test GaLU 0.05914882; 0.003095623 0.03034839; 0.00077383034 0.015367687; 0.00019342359 0.0077321827; 4.837243e-5 0.003878206; 1.207157e-5
WARNING: Method definition objective(Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:206 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:250.
Gradient test Sigmoid2 34.186325; 0.2620659 17.175415; 0.04878044 8.601929; 0.010169029 4.303772; 0.0022768974 2.1524658; 0.00055861473
WARNING: Method definition objective(Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:250 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_activations.jl:298.
Gradient test ExpClamp 1.0627627; 0.056252837 0.5454397; 0.014068067 0.27623606; 0.0035178363 0.13899755; 0.000879392 0.06971884; 0.00021963567
WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_sequential.jl:67 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_flux.jl:22.
┌ Warning: This type should probably now use `Flux.@layer` instead of `@functor`: ActNorm
│ caller = ip:0x0
└ @ Core :-1
┌ Warning: Assignment to `grads` in soft scope is ambiguous because a global variable by the same name exists: `grads` will be treated as a new local. Disambiguate by using `local grads` to suppress this warning or `global grads` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_flux.jl:41
[ Info: Loss: 4.6742767e-5
[ Info: Loss: 0.00016299656
[ Info: Loss: 0.000344626
[ Info: Loss: 0.00058013824
[ Info: Loss: 0.0008538514
Test Summary:                               | Pass  Total     Time
Basics                                      |  104    104  9m35.2s
  Test test_utils/test_objectives.jl        |    4      4    20.3s
  Test test_utils/test_sequential.jl        |   13     13  3m50.6s
  Test test_utils/test_nnlib_convolution.jl |   10     10    16.1s
  Test test_utils/test_activations.jl       |   19     19    42.5s
  Test test_utils/test_squeeze.jl           |   19     19  1m14.7s
  Test test_utils/test_jacobian.jl          |    2      2     1.7s
  Test test_utils/test_chainrules.jl        |    2      2  2m29.1s
  Test test_utils/test_flux.jl              |   35     35    39.7s
┌ Warning: Assignment to `X` in soft scope is ambiguous because a global variable by the same name exists: `X` will be treated as a new local. Disambiguate by using `local X` to suppress this warning or `global X` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:22
┌ Warning: Assignment to `X0` in soft scope is ambiguous because a global variable by the same name exists: `X0` will be treated as a new local. Disambiguate by using `local X0` to suppress this warning or `global X0` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:23
┌ Warning: Assignment to `dX` in soft scope is ambiguous because a global variable by the same name exists: `dX` will be treated as a new local. Disambiguate by using `local dX` to suppress this warning or `global dX` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:24
┌ Warning: Assignment to `Y` in soft scope is ambiguous because a global variable by the same name exists: `Y` will be treated as a new local. Disambiguate by using `local Y` to suppress this warning or `global Y` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:49
┌ Warning: Assignment to `f0` in soft scope is ambiguous because a global variable by the same name exists: `f0` will be treated as a new local. Disambiguate by using `local f0` to suppress this warning or `global f0` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:64
┌ Warning: Assignment to `ΔX` in soft scope is ambiguous because a global variable by the same name exists: `ΔX` will be treated as a new local. Disambiguate by using `local ΔX` to suppress this warning or `global ΔX` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:64
┌ Warning: Assignment to `h` in soft scope is ambiguous because a global variable by the same name exists: `h` will be treated as a new local. Disambiguate by using `local h` to suppress this warning or `global h` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:65
┌ Warning: Assignment to `maxiter` in soft scope is ambiguous because a global variable by the same name exists: `maxiter` will be treated as a new local. Disambiguate by using `local maxiter` to suppress this warning or `global maxiter` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:66
┌ Warning: Assignment to `err1` in soft scope is ambiguous because a global variable by the same name exists: `err1` will be treated as a new local. Disambiguate by using `local err1` to suppress this warning or `global err1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:67
┌ Warning: Assignment to `err2` in soft scope is ambiguous because a global variable by the same name exists: `err2` will be treated as a new local. Disambiguate by using `local err2` to suppress this warning or `global err2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:68
┌ Warning: Assignment to `err3` in soft scope is ambiguous because a global variable by the same name exists: `err3` will be treated as a new local. Disambiguate by using `local err3` to suppress this warning or `global err3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:88
┌ Warning: Assignment to `err4` in soft scope is ambiguous because a global variable by the same name exists: `err4` will be treated as a new local. Disambiguate by using `local err4` to suppress this warning or `global err4` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:89
┌ Warning: Assignment to `err5` in soft scope is ambiguous because a global variable by the same name exists: `err5` will be treated as a new local. Disambiguate by using `local err5` to suppress this warning or `global err5` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:112
┌ Warning: Assignment to `err6` in soft scope is ambiguous because a global variable by the same name exists: `err6` will be treated as a new local. Disambiguate by using `local err6` to suppress this warning or `global err6` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:113
┌ Warning: Assignment to `θ` in soft scope is ambiguous because a global variable by the same name exists: `θ` will be treated as a new local. Disambiguate by using `local θ` to suppress this warning or `global θ` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:141
┌ Warning: Assignment to `dθ` in soft scope is ambiguous because a global variable by the same name exists: `dθ` will be treated as a new local. Disambiguate by using `local dθ` to suppress this warning or `global dθ` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:145
┌ Warning: Assignment to `dY` in soft scope is ambiguous because a global variable by the same name exists: `dY` will be treated as a new local. Disambiguate by using `local dY` to suppress this warning or `global dY` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:149
┌ Warning: Assignment to `dY_` in soft scope is ambiguous because a global variable by the same name exists: `dY_` will be treated as a new local. Disambiguate by using `local dY_` to suppress this warning or `global dY_` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:173
┌ Warning: Assignment to `dX_` in soft scope is ambiguous because a global variable by the same name exists: `dX_` will be treated as a new local. Disambiguate by using `local dX_` to suppress this warning or `global dX_` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:174
┌ Warning: Assignment to `dθ_` in soft scope is ambiguous because a global variable by the same name exists: `dθ_` will be treated as a new local. Disambiguate by using `local dθ_` to suppress this warning or `global dθ_` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:174
┌ Warning: Assignment to `a` in soft scope is ambiguous because a global variable by the same name exists: `a` will be treated as a new local. Disambiguate by using `local a` to suppress this warning or `global a` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:175
┌ Warning: Assignment to `b` in soft scope is ambiguous because a global variable by the same name exists: `b` will be treated as a new local. Disambiguate by using `local b` to suppress this warning or `global b` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_residual_block.jl:176 Testing activation InvertibleNetworks.ActivationFunction(InvertibleNetworks.ReLU, nothing, InvertibleNetworks.ReLUgrad) Gradient test convolutions 0.045305222; 0.0014301948 0.023012549; 0.00034981035 0.01159358; 9.003002e-5 0.005819738; 2.2596214e-5 0.0029147267; 3.731111e-6 Gradient test convolutions 83.95273; 5.8751755 43.409363; 1.5045891 22.072662; 0.38431358 11.1315; 0.096987724 5.589905; 0.024339199 Gradient test convolutions 29.625458; 0.75169086 14.947876; 0.24069834 7.534088; 0.060199022 3.7821655; 0.014978051 1.8947754; 0.0037963986 Jacobian test 10.057255; 2.4782062 5.0147734; 0.8396366 2.50392; 0.28040528 1.2519965; 0.09685614 0.62561893; 0.032558538 Testing activation InvertibleNetworks.ActivationFunction(InvertibleNetworks.LeakyReLU, InvertibleNetworks.LeakyReLUinv, InvertibleNetworks.LeakyReLUgrad) Gradient test convolutions 0.017455742; 0.0005532373 0.008866176; 0.00013829768 0.004467666; 3.4569297e-5 0.0022424608; 8.622417e-6 0.0011233389; 2.097222e-6 Gradient test convolutions 91.18118; 8.325516 47.628906; 2.124443 24.341125; 0.53554916 12.304169; 0.13416862 6.185547; 0.033621788 Gradient test convolutions 22.616955; 4.0246267 12.298904; 1.0218863 6.4029922; 0.25740314 3.2655869; 0.06461084 1.6489105; 0.016188323 Jacobian test 3.4693124; 1.0245962 1.7671852; 0.3426302 0.8923431; 0.12186899 0.44741184; 0.044567224 0.22405875; 0.01658354 WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_flux.jl:22 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_flux_block.jl:37. Gradient test convolutions 0.4241469; 0.022323519 0.21765423; 0.0055809766 0.1102221; 0.0013955012 0.055459738; 0.00034906343 0.02781725; 8.7151304e-5 Gradient test convolutions 0.2627449; 0.028196186 0.13842177; 0.007048771 0.07097316; 0.0017621145 0.035927534; 0.00044010207 0.018073797; 0.00011002086 WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_flux_block.jl:37 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_resnet.jl:28. Gradient test 1.3812375; 0.07774472 0.7101741; 0.01931703 0.35956573; 0.0051798224 0.18117714; 0.0011956394 0.09086418; 0.0003222078 Gradient test convolutions 5.132864; 3.0607657 1.6170692; 0.5810201 0.6470165; 0.12899196 0.28948402; 0.030471742 0.1371479; 0.0076417625 WARNING: Method definition loss(Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_utils/test_chainrules.jl:49 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_layer_conv1x1.jl:97. Gradient test ΔX 126.54053; 0.63575745 63.429688; 0.1584549 31.754395; 0.039676666 15.887695; 0.009340286 7.9467773; 0.0017404556 Gradient test Δv1 (dot(Δv1, Δv1), dot(Δv2, Δv2), dot(Δv3, Δv3)) = (212595.77f0, 3.4147162f6, 1.8465046f6) 4.1030273; 0.13858986 2.0874023; 0.033406258 1.0522461; 0.008158207 0.5283203; 0.0018818378 0.26464844; 0.00045263767 WARNING: Method definition loss(Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_layer_conv1x1.jl:97 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_layer_conv1x1.jl:158. 
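The repeated soft-scope warnings in this run come from test scripts that assign to existing top-level globals inside `for` loops; when a file is run non-interactively, such an assignment creates a new local unless it is marked `global`, which is what the warning suggests. A minimal reproduction and fix, with illustrative names:

    # script.jl -- run non-interactively, e.g. `julia script.jl` or via include()
    err1 = 0.0                 # existing global at file scope
    maxiter = 5

    for j in 1:maxiter
        # Without the `global` annotation below, this assignment warns and
        # updates a brand-new local `err1`, leaving the global at 0.0.
        global err1
        err1 += 1.0 / j
    end

    println(err1)              # 2.2833...: the global really was updated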
Gradient test ΔX 64.52051; 0.6362152 32.419434; 0.15892792 16.249023; 0.040157318 8.134766; 0.009824753 4.069824; 0.0024709702 Gradient test Δv1 50.98633; 3.204895 26.3042; 0.79141235 13.351074; 0.19673157 6.7248535; 0.049049377 3.3740234; 0.012928009 Jacobian test 44.56233; 7.934799 22.304296; 1.9899396 11.131738; 0.49690723 5.5575395; 0.12406407 2.7762816; 0.030985385 1.3874655; 0.007745586 0.6935638; 0.0019362196 Jacobian (inverse) test 44.725784; 8.003789 22.385923; 2.0073106 11.172319; 0.50125474 5.577747; 0.12515168 2.7863612; 0.0312582 1.3924991; 0.007814421 0.6960792; 0.0019540677 Gradient test coupling layer 305.74512; 17.10617 157.11646; 4.309189 79.606445; 1.1063766 40.05591; 0.30050278 20.104492; 0.0737133 10.072266; 0.01683712 Gradient test coupling layer 3.204895; 3.907508 0.79730225; 1.1486087 0.1638794; 0.3395326 0.008850098; 0.09667671 WARNING: Method definition loss(Any, Any, Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_basic.jl:93 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_basic.jl:164. Gradient test coupling layer 1053.634; 50.55182 540.49365; 11.599274 273.9165; 2.129959 137.6377; 0.3855362 68.9126; 0.0990181 34.461914; 0.043893814 Gradient test coupling layer 63.8468; 12.1818695 34.778687; 3.235649 18.250183; 0.7569847 9.398926; 0.10465813 Jacobian test 6.6104364; 0.8859867 3.2506096; 0.29435062 1.6108418; 0.09653996 0.7999984; 0.032375693 0.39905566; 0.013352332 WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_resnet.jl:28 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_irim.jl:40. Gradient test invertible layer 2404.5254; 126.58569 1233.8984; 31.657104 624.8711; 7.9066772 314.41602; 1.9728699 157.70215; 0.4922943 Gradient test invertible layer 11.240784; 3.1740193 6.341858; 0.8655436 3.3774185; 0.22628224 1.7436256; 0.058224678 0.8863716; 0.014553547 Gradient test invertible layer 0.98441315; 0.009997129 0.49370193; 0.006493926 0.24530792; 0.001703918 0.122306824; 0.00050482154 Jacobian test 11.276266; 0.67187417 5.639334; 0.18685128 2.8196445; 0.052825127 1.4096178; 0.014610171 0.70469093; 0.0036081967 WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_irim.jl:40 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:37. Gradient test coupling layer 320.45508; 20.107758 259.5005; 12.949799 209.63013; 8.330093 169.04736; 5.320801 136.09619; 3.398346 109.43579; 2.1598434 ┌ Warning: Assignment to `X` in soft scope is ambiguous because a global variable by the same name exists: `X` will be treated as a new local. Disambiguate by using `local X` to suppress this warning or `global X` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:88 ┌ Warning: Assignment to `Y` in soft scope is ambiguous because a global variable by the same name exists: `Y` will be treated as a new local. Disambiguate by using `local Y` to suppress this warning or `global Y` to assign to the existing global variable. 
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:89
┌ Warning: Assignment to `L02` in soft scope is ambiguous because a global variable by the same name exists: `L02` will be treated as a new local. Disambiguate by using `local L02` to suppress this warning or `global L02` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:91
┌ Warning: Assignment to `Lini` in soft scope is ambiguous because a global variable by the same name exists: `Lini` will be treated as a new local. Disambiguate by using `local Lini` to suppress this warning or `global Lini` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:92
┌ Warning: Assignment to `dW1` in soft scope is ambiguous because a global variable by the same name exists: `dW1` will be treated as a new local. Disambiguate by using `local dW1` to suppress this warning or `global dW1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:93
┌ Warning: Assignment to `dW2` in soft scope is ambiguous because a global variable by the same name exists: `dW2` will be treated as a new local. Disambiguate by using `local dW2` to suppress this warning or `global dW2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:94
┌ Warning: Assignment to `dW3` in soft scope is ambiguous because a global variable by the same name exists: `dW3` will be treated as a new local. Disambiguate by using `local dW3` to suppress this warning or `global dW3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:95
┌ Warning: Assignment to `f0` in soft scope is ambiguous because a global variable by the same name exists: `f0` will be treated as a new local. Disambiguate by using `local f0` to suppress this warning or `global f0` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `ΔX` in soft scope is ambiguous because a global variable by the same name exists: `ΔX` will be treated as a new local. Disambiguate by using `local ΔX` to suppress this warning or `global ΔX` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `Δv1` in soft scope is ambiguous because a global variable by the same name exists: `Δv1` will be treated as a new local. Disambiguate by using `local Δv1` to suppress this warning or `global Δv1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `Δv2` in soft scope is ambiguous because a global variable by the same name exists: `Δv2` will be treated as a new local. Disambiguate by using `local Δv2` to suppress this warning or `global Δv2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `Δv3` in soft scope is ambiguous because a global variable by the same name exists: `Δv3` will be treated as a new local. Disambiguate by using `local Δv3` to suppress this warning or `global Δv3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `ΔW1` in soft scope is ambiguous because a global variable by the same name exists: `ΔW1` will be treated as a new local. Disambiguate by using `local ΔW1` to suppress this warning or `global ΔW1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `ΔW2` in soft scope is ambiguous because a global variable by the same name exists: `ΔW2` will be treated as a new local. Disambiguate by using `local ΔW2` to suppress this warning or `global ΔW2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `ΔW3` in soft scope is ambiguous because a global variable by the same name exists: `ΔW3` will be treated as a new local. Disambiguate by using `local ΔW3` to suppress this warning or `global ΔW3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:97
┌ Warning: Assignment to `h` in soft scope is ambiguous because a global variable by the same name exists: `h` will be treated as a new local. Disambiguate by using `local h` to suppress this warning or `global h` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:98
┌ Warning: Assignment to `maxiter` in soft scope is ambiguous because a global variable by the same name exists: `maxiter` will be treated as a new local. Disambiguate by using `local maxiter` to suppress this warning or `global maxiter` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:99
┌ Warning: Assignment to `err3` in soft scope is ambiguous because a global variable by the same name exists: `err3` will be treated as a new local. Disambiguate by using `local err3` to suppress this warning or `global err3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:100
┌ Warning: Assignment to `err4` in soft scope is ambiguous because a global variable by the same name exists: `err4` will be treated as a new local. Disambiguate by using `local err4` to suppress this warning or `global err4` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:101
┌ Warning: Assignment to `factor1` in soft scope is ambiguous because a global variable by the same name exists: `factor1` will be treated as a new local. Disambiguate by using `local factor1` to suppress this warning or `global factor1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:115
┌ Warning: Assignment to `factor2` in soft scope is ambiguous because a global variable by the same name exists: `factor2` will be treated as a new local. Disambiguate by using `local factor2` to suppress this warning or `global factor2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:116
Gradient test coupling layer 54.896973; 2.3750763 44.57715; 1.2404938 36.09729; 0.5568218 29.128418; 0.1948719
Gradient test coupling layer 48.322083; 5.9425783 39.685303; 3.726427 32.424744; 2.3046408 26.363464; 1.420042
Gradient test coupling layer 38.48828; 6.380453 31.804565; 4.0904226 26.095947; 2.6200433 21.285645; 1.6871481
┌ Warning: Assignment to `X` in soft scope is ambiguous because a global variable by the same name exists: `X` will be treated as a new local. Disambiguate by using `local X` to suppress this warning or `global X` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:132
┌ Warning: Assignment to `Y` in soft scope is ambiguous because a global variable by the same name exists: `Y` will be treated as a new local. Disambiguate by using `local Y` to suppress this warning or `global Y` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:133
┌ Warning: Assignment to `L01` in soft scope is ambiguous because a global variable by the same name exists: `L01` will be treated as a new local. Disambiguate by using `local L01` to suppress this warning or `global L01` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:134
┌ Warning: Assignment to `Lini` in soft scope is ambiguous because a global variable by the same name exists: `Lini` will be treated as a new local. Disambiguate by using `local Lini` to suppress this warning or `global Lini` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:135
┌ Warning: Assignment to `dv1` in soft scope is ambiguous because a global variable by the same name exists: `dv1` will be treated as a new local. Disambiguate by using `local dv1` to suppress this warning or `global dv1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:136
┌ Warning: Assignment to `dv2` in soft scope is ambiguous because a global variable by the same name exists: `dv2` will be treated as a new local. Disambiguate by using `local dv2` to suppress this warning or `global dv2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:137
┌ Warning: Assignment to `dv3` in soft scope is ambiguous because a global variable by the same name exists: `dv3` will be treated as a new local. Disambiguate by using `local dv3` to suppress this warning or `global dv3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:138
┌ Warning: Assignment to `f0` in soft scope is ambiguous because a global variable by the same name exists: `f0` will be treated as a new local. Disambiguate by using `local f0` to suppress this warning or `global f0` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `ΔX` in soft scope is ambiguous because a global variable by the same name exists: `ΔX` will be treated as a new local. Disambiguate by using `local ΔX` to suppress this warning or `global ΔX` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `Δv1` in soft scope is ambiguous because a global variable by the same name exists: `Δv1` will be treated as a new local. Disambiguate by using `local Δv1` to suppress this warning or `global Δv1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `Δv2` in soft scope is ambiguous because a global variable by the same name exists: `Δv2` will be treated as a new local. Disambiguate by using `local Δv2` to suppress this warning or `global Δv2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `Δv3` in soft scope is ambiguous because a global variable by the same name exists: `Δv3` will be treated as a new local. Disambiguate by using `local Δv3` to suppress this warning or `global Δv3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `ΔW1` in soft scope is ambiguous because a global variable by the same name exists: `ΔW1` will be treated as a new local. Disambiguate by using `local ΔW1` to suppress this warning or `global ΔW1` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `ΔW2` in soft scope is ambiguous because a global variable by the same name exists: `ΔW2` will be treated as a new local. Disambiguate by using `local ΔW2` to suppress this warning or `global ΔW2` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `ΔW3` in soft scope is ambiguous because a global variable by the same name exists: `ΔW3` will be treated as a new local. Disambiguate by using `local ΔW3` to suppress this warning or `global ΔW3` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:140
┌ Warning: Assignment to `h` in soft scope is ambiguous because a global variable by the same name exists: `h` will be treated as a new local. Disambiguate by using `local h` to suppress this warning or `global h` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:141
┌ Warning: Assignment to `maxiter` in soft scope is ambiguous because a global variable by the same name exists: `maxiter` will be treated as a new local. Disambiguate by using `local maxiter` to suppress this warning or `global maxiter` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:142
┌ Warning: Assignment to `err5` in soft scope is ambiguous because a global variable by the same name exists: `err5` will be treated as a new local. Disambiguate by using `local err5` to suppress this warning or `global err5` to assign to the existing global variable.
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:143 ┌ Warning: Assignment to `err6` in soft scope is ambiguous because a global variable by the same name exists: `err6` will be treated as a new local. Disambiguate by using `local err6` to suppress this warning or `global err6` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:144 ┌ Warning: Assignment to `factor1` in soft scope is ambiguous because a global variable by the same name exists: `factor1` will be treated as a new local. Disambiguate by using `local factor1` to suppress this warning or `global factor1` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:158 ┌ Warning: Assignment to `factor2` in soft scope is ambiguous because a global variable by the same name exists: `factor2` will be treated as a new local. Disambiguate by using `local factor2` to suppress this warning or `global factor2` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:159 Gradient test coupling layer 2.8132324; 12.162975 4.7470703; 7.2338963 5.10791; 4.476864 4.8378906; 2.8299294 Gradient test coupling layer 14.329346; 10.669937 13.53418; 6.4652405 12.255371; 3.744156 10.699707; 2.0999298 Gradient test coupling layer 12.859131; 69.345276 7.2209473; 52.409912 3.946045; 40.097183 2.02417; 30.945099 Jacobian test 16.323702; 4.674478 13.1139345; 3.2213886 10.519557; 2.227901 8.431119; 1.5376899 6.7572203; 1.0598322 5.416492; 0.729236 4.337446; 0.49880704 Gradient test coupling layer 9.891983; 0.6115942 5.0983887; 0.15339994 2.5874863; 0.03840804 1.3033295; 0.009617686 0.6540756; 0.0023979545 0.32764435; 0.00059244037 Gradient test coupling layer 5.050728; 0.27723122 2.5948792; 0.06910038 1.3147392; 0.017250538 0.66168976; 0.0043051243 Gradient test weights for permute=none, reverse=true, logdet=true 2.368805; 1.2567723 1.6032715; 0.20951712 0.84698486; 0.05940944 0.4371338; 0.016063362 0.2210083; 0.005590275 Gradient test ΔX for permute=none, reverse=true, logdet=true 14.13501; 11.885458 4.1634216; 3.0386455 1.3605957; 0.79820764 0.48223877; 0.20104474 0.19232178; 0.05172476 Gradient test ΔX for permute=none, reverse=true, logdet=true 7.784546; 11.165292 1.1627197; 2.8530927 0.10144043; 0.7437461 0.23120117; 0.1913921 0.1616211; 0.04967554 Gradient test ΔX for permute=none, reverse=true, logdet=true 12.332336; 11.690671 3.1960144; 2.8751817 0.93362427; 0.7732079 0.2960205; 0.21581233 0.09725952; 0.05715543 Gradient test weights for permute=none, reverse=false, logdet=true 0.44430542; 0.3119377 0.17175293; 0.10556906 0.07897949; 0.045887556 0.028778076; 0.012232108 0.01083374; 0.0025607562 Gradient test ΔX for permute=none, reverse=false, logdet=true 3.6669006; 4.3880105 0.761322; 1.121877 0.100097656; 0.2803751 0.01864624; 0.07149248 0.026611328; 0.018458031 Gradient test ΔX for permute=none, reverse=false, logdet=true 4.4631653; 3.5825386 1.3358765; 0.89556307 0.44360352; 0.22344682 0.1619873; 0.051908955 0.068481445; 0.01344227 Gradient test ΔX for permute=none, reverse=false, logdet=true 4.204132; 3.9644701 1.0759583; 0.9561273 0.27011108; 0.21019562 0.0736084; 0.043650664 0.025390625; 0.010411757 Gradient test weights for permute=none, reverse=true, logdet=false 8.953348; 9.9907875 7.026106; 2.445962 4.132532; 0.6035018 2.2085457; 0.15947127 1.1428928; 0.04111564 
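The `Jacobian test` blocks follow the same halving convention as the gradient tests, but for a vector-valued forward map: the first column behaves like the plain forward difference, O(h), and the second like the residual after subtracting a Jacobian-vector product, O(h^2). A sketch with a toy nonlinear map standing in for a layer (not the package's code):

    using LinearAlgebra, Random

    Random.seed!(0)
    W = randn(Float32, 4, 4)
    F(x)     = tanh.(W * x)                          # toy "layer"
    JF(x, v) = (1 .- tanh.(W * x).^2) .* (W * v)     # its Jacobian-vector product

    x0, dx = randn(Float32, 4), randn(Float32, 4)
    F0 = F(x0)

    h = 0.1f0
    for _ in 1:5
        err1 = norm(F(x0 + h*dx) - F0)                  # first order in h
        err2 = norm(F(x0 + h*dx) - F0 - h*JF(x0, dx))   # second order in h
        println(err1, "; ", err2)
        global h = h / 2
    end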
Gradient test ΔX for permute=none, reverse=true, logdet=false 29.165527; 23.790709 8.803711; 6.1163015 2.960205; 1.6165003 1.0814819; 0.40962952 0.44213867; 0.10621247 Gradient test ΔX for permute=none, reverse=true, logdet=false 13.404419; 18.974258 2.0375977; 4.822517 0.12365723; 1.2688024 0.37298584; 0.32324398 0.26348877; 0.08462614 Gradient test ΔX for permute=none, reverse=true, logdet=false 23.749512; 20.967873 6.5480957; 5.157276 2.0905762; 1.3951664 0.73950195; 0.39179704 0.27972412; 0.10587166 Gradient test weights for permute=none, reverse=false, logdet=false 2.385327; 2.6270292 1.8618262; 0.64435196 1.09165; 0.16143906 0.58538723; 0.041157305 0.3030343; 0.010237962 Gradient test ΔX for permute=none, reverse=false, logdet=false 6.438385; 8.756319 1.0460205; 2.2049878 0.038604736; 0.5408789 0.15390015; 0.13584167 0.11010742; 0.034763485 Gradient test ΔX for permute=none, reverse=false, logdet=false 11.090912; 10.521312 2.9595947; 2.6747944 0.8081665; 0.6657664 0.23501587; 0.16381583 0.07757568; 0.04197566 Gradient test ΔX for permute=none, reverse=false, logdet=false 9.323303; 10.342289 1.9953918; 2.5048845 0.31680298; 0.5715493 0.0004272461; 0.12694593 0.03466797; 0.029018618 Gradient test weights for permute=lower, reverse=true, logdet=true 7.325592; 4.854825 5.0294495; 1.0607591 2.796051; 0.24905324 1.4597168; 0.062835336 0.74716187; 0.014114201 Gradient test ΔX for permute=lower, reverse=true, logdet=true 14.134949; 11.885398 4.163452; 3.0386767 1.3605957; 0.79820794 0.48233032; 0.20113644 0.19244385; 0.051846907 Gradient test ΔX for permute=lower, reverse=true, logdet=true 7.784668; 11.165413 1.1626587; 2.8530312 0.10116577; 0.74402046 0.23104858; 0.19154453 0.16159058; 0.049705982 Gradient test ΔX for permute=lower, reverse=true, logdet=true 12.332184; 11.690516 3.1958618; 2.8750284 0.9335022; 0.7730855 0.29580688; 0.21559851 0.09713745; 0.057033263 Gradient test weights for permute=lower, reverse=false, logdet=true 2.492859; 1.2575078 1.6025391; 0.27264428 0.86572266; 0.071869016 0.45108032; 0.017715514 0.22729492; 0.007102996 Gradient test ΔX for permute=lower, reverse=false, logdet=true 4.1612854; 4.0570865 1.0841675; 1.0320681 0.29229736; 0.2662477 0.081085205; 0.06806036 0.023864746; 0.017352324 Gradient test ΔX for permute=lower, reverse=false, logdet=true 4.625244; 3.9207382 1.3451538; 0.99290085 0.42642212; 0.25029564 0.14685059; 0.05878734 0.059570312; 0.015538689 Gradient test ΔX for permute=lower, reverse=false, logdet=true 2.595459; 2.509299 0.66519165; 0.6221117 0.17071533; 0.14917536 0.044311523; 0.033541538 0.013458252; 0.008073258 Gradient test weights for permute=lower, reverse=true, logdet=false 2.474319; 2.0979023 1.7043066; 0.58180404 0.99484634; 0.14820898 0.5359955; 0.035532176 0.27501392; 0.010749906 Gradient test ΔX for permute=lower, reverse=true, logdet=false 29.165527; 23.79071 8.803589; 6.1161804 2.960205; 1.6165009 1.0814819; 0.40962982 0.4420166; 0.106090546 Gradient test ΔX for permute=lower, reverse=true, logdet=false 13.404419; 18.974257 2.0375977; 4.8225164 0.12365723; 1.2688022 0.37298584; 0.32324386 0.26348877; 0.08462608 Gradient test ΔX for permute=lower, reverse=true, logdet=false 23.749512; 20.96787 6.548218; 5.1573973 2.0906982; 1.3952879 0.73950195; 0.39179677 0.27972412; 0.10587153 Gradient test weights for permute=lower, reverse=false, logdet=false 0.7211319; 0.59562516 0.49473166; 0.16364688 0.2837956; 0.045393676 0.15347719; 0.011117443 0.07921767; 0.0030796453 Gradient test ΔX for permute=lower, reverse=false, logdet=false 
7.427109; 8.094427 1.6917572; 2.0254161 0.3457489; 0.51257837 0.04536438; 0.12877913 0.009094238; 0.032613136 Gradient test ΔX for permute=lower, reverse=false, logdet=false 11.415009; 11.197651 2.9781494; 2.8694704 0.7737732; 0.71943367 0.20462036; 0.17745061 0.0597229; 0.046138026 Gradient test ΔX for permute=lower, reverse=false, logdet=false 6.10614; 7.4321303 1.1741486; 1.8371437 0.11810303; 0.44960055 0.058685303; 0.10706346 0.058532715; 0.024341665 Gradient test weights for permute=both, reverse=true, logdet=true 2.4151306; 2.10353 1.8240356; 0.43529463 1.0346985; 0.09496665 0.53900146; 0.025831103 0.274292; 0.008124292 Gradient test ΔX for permute=both, reverse=true, logdet=true 12.21814; 12.152563 3.117157; 3.0843687 0.7748108; 0.75841665 0.18734741; 0.17915036 0.05291748; 0.048818953 Gradient test ΔX for permute=both, reverse=true, logdet=true 12.6828; 15.004396 2.59198; 3.752778 0.36904907; 0.94944805 0.05316162; 0.23703787 0.08151245; 0.06358729 Gradient test ΔX for permute=both, reverse=true, logdet=true 13.559601; 9.383762 4.744995; 2.6570761 1.6263123; 0.58235276 0.6442566; 0.12227684 0.29769897; 0.0367091 Gradient test weights for permute=both, reverse=false, logdet=true 1.329895; 1.267164 1.048523; 0.25000656 0.6157837; 0.03348106 0.32333374; 0.0012986362 0.16113281; 0.0011833757 Gradient test ΔX for permute=both, reverse=false, logdet=true 3.1001587; 2.9637327 0.796051; 0.72783804 0.21832275; 0.18421628 0.06378174; 0.0467285 0.01852417; 0.0099975495 Gradient test ΔX for permute=both, reverse=false, logdet=true 5.705017; 3.13612 2.1201782; 0.8357297 0.8708191; 0.22859484 0.38049316; 0.059381038 0.17630005; 0.015743986 Gradient test ΔX for permute=both, reverse=false, logdet=true 3.4647827; 3.6067467 0.81085205; 0.881834 0.19442749; 0.22991845 0.036987305; 0.05473279 0.0031738281; 0.01204657 Gradient test weights for permute=both, reverse=true, logdet=false 1.6433454; 2.1860068 1.3435507; 0.5711254 0.8041215; 0.15321654 0.4347868; 0.04388222 0.22629738; 0.01303713 Gradient test ΔX for permute=both, reverse=true, logdet=false 21.444092; 22.141104 5.2724915; 5.6209974 1.2105103; 1.3847632 0.23760986; 0.32473636 0.044067383; 0.08763063 Gradient test ΔX for permute=both, reverse=true, logdet=false 24.77356; 28.538267 5.285095; 7.167449 0.88964844; 1.8308254 0.010498047; 0.46009046 0.11016846; 0.1251258 Gradient test ΔX for permute=both, reverse=true, logdet=false 26.256042; 16.452938 9.6622925; 4.7607408 3.468567; 1.017791 1.4263916; 0.20100367 0.67407227; 0.0613783 Gradient test weights for permute=both, reverse=false, logdet=false 0.5848496; 0.62487864 0.4503528; 0.15451133 0.26216817; 0.04026389 0.14090538; 0.01031065 0.072466135; 0.00314188 Gradient test ΔX for permute=both, reverse=false, logdet=false 9.192505; 8.091491 2.5539856; 2.0034785 0.7760925; 0.500839 0.26480103; 0.12717427 0.099121094; 0.030307718 Gradient test ΔX for permute=both, reverse=false, logdet=false 12.00235; 7.743035 4.139984; 2.0103266 1.5904236; 0.52559483 0.66574097; 0.13332659 0.30010986; 0.033902675 Gradient test ΔX for permute=both, reverse=false, logdet=false 7.7928925; 9.52825 1.448822; 2.316501 0.17274475; 0.6065842 0.063583374; 0.15333633 0.07249451; 0.035965346 Gradient test weights for permute=full, reverse=true, logdet=true 2.9067688; 2.5906467 2.187561; 0.56114674 1.2306519; 0.14370203 0.6508484; 0.036328554 0.33377075; 0.009817719 Gradient test ΔX for permute=full, reverse=true, logdet=true 14.135193; 11.88564 4.1636963; 3.03892 1.3608398; 0.79845166 0.4824829; 0.20128882 0.19256592; 
0.051968873 Gradient test ΔX for permute=full, reverse=true, logdet=true 7.784546; 11.165293 1.1626587; 2.853032 0.10128784; 0.7438988 0.23104858; 0.19154474 0.16140747; 0.049889192 Gradient test ΔX for permute=full, reverse=true, logdet=true 12.332336; 11.69067 3.195984; 2.8751504 0.9336548; 0.77323806 0.29592896; 0.2157206 0.09732056; 0.05721638 Gradient test weights for permute=full, reverse=false, logdet=true 1.329895; 1.267164 1.048523; 0.25000656 0.6157837; 0.03348106 0.32333374; 0.0012986362 0.16113281; 0.0011833757 Gradient test ΔX for permute=full, reverse=false, logdet=true 3.1001892; 2.963763 0.7961426; 0.7279295 0.21838379; 0.18427722 0.06387329; 0.046820007 0.01852417; 0.009997529 Gradient test ΔX for permute=full, reverse=false, logdet=true 5.7052307; 3.1363342 2.1201782; 0.83572996 0.8708801; 0.228656 0.3805542; 0.059442133 0.17642212; 0.015866086 Gradient test ΔX for permute=full, reverse=false, logdet=true 3.4647522; 3.6067164 0.810791; 0.8817732 0.19442749; 0.22991857 0.036956787; 0.054702327 0.0031433105; 0.012016079 Gradient test weights for permute=full, reverse=true, logdet=false 2.3083425; 2.3058472 1.7551966; 0.55189824 1.0049238; 0.14862359 0.53777313; 0.03900057 0.27929783; 0.009089023 Gradient test ΔX for permute=full, reverse=true, logdet=false 29.165466; 23.790646 8.803528; 6.1161175 2.960144; 1.616439 1.0814209; 0.40956837 0.44195557; 0.1060293 Gradient test ΔX for permute=full, reverse=true, logdet=false 13.40448; 18.97432 2.0375977; 4.8225174 0.12365723; 1.2688026 0.37298584; 0.3232441 0.26348877; 0.0846262 Gradient test ΔX for permute=full, reverse=true, logdet=false 23.749512; 20.967869 6.548279; 5.157458 2.0907593; 1.3953488 0.739563; 0.3918577 0.27978516; 0.10593252 Gradient test weights for permute=full, reverse=false, logdet=false 0.5848496; 0.6248785 0.4503528; 0.15451127 0.26216817; 0.04026386 0.14090538; 0.010310635 0.072466135; 0.0031418726 Gradient test ΔX for permute=full, reverse=false, logdet=false 9.192505; 8.09149 2.5539856; 2.003478 0.7760925; 0.50083876 0.26480103; 0.12717415 0.099121094; 0.030307658 Gradient test ΔX for permute=full, reverse=false, logdet=false 12.00235; 7.743035 4.140045; 2.0103877 1.5904236; 0.52559483 0.665802; 0.13338763 0.3001709; 0.03396371 Gradient test ΔX for permute=full, reverse=false, logdet=false 7.7928925; 9.528251 1.448822; 2.3165011 0.17277527; 0.6066148 0.063583374; 0.1533364 0.07249451; 0.035965383 Jacobian test 6.2894845; 1.2738092 3.146785; 0.45088485 1.5710182; 0.16587965 0.7847091; 0.058722984 0.39220816; 0.02033361 Gradient test coupling layer 351.9048; 19.520477 180.84375; 4.868881 91.67114; 1.185173 46.135498; 0.29265976 23.142822; 0.07125664 11.590088; 0.016951561 Gradient test coupling layer 76.5188; 18.449917 42.30017; 5.184187 22.3396; 1.4025793 11.527832; 0.34325743 Gradient test coupling layer 8.69165; 1.1126515 4.079468; 0.2899683 1.9661865; 0.07143679 0.96069336; 0.013318494 WARNING: Method definition mean(Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:7 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_layer_hint.jl:7. WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_glow.jl:37 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_layer_hint.jl:53. 
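The paired error columns printed by these gradient tests are consistent with a standard Taylor check: the step size is halved at each iteration, the first column is expected to decay roughly like O(h) and the second roughly like O(h^2). A minimal, self-contained sketch of such a check, with `loss` and `grad` as illustrative stand-ins rather than the package's test helpers:

    # Sketch only: `loss` and `grad` are illustrative stand-ins, not InvertibleNetworks helpers.
    using LinearAlgebra, Random
    Random.seed!(0)

    loss(X) = 0.5f0 * norm(X)^2          # example objective
    grad(X) = X                          # its analytic gradient

    X0 = randn(Float32, 16, 16, 2, 4)    # base point
    dX = randn(Float32, size(X0)...)     # random perturbation direction
    f0 = loss(X0)
    g0 = dot(grad(X0), dX)               # directional derivative along dX

    for h in [0.1f0 / 2^j for j = 0:4]
        err1 = abs(loss(X0 + h*dX) - f0)           # first-order error, halves with h
        err2 = abs(loss(X0 + h*dX) - f0 - h*g0)    # second-order remainder, quarters with h
        println(err1, "; ", err2)
    end

A correct gradient shows the second column shrinking about one order faster than the first, which is what the consecutive-ratio checks later in this log (rate_1 ≈ 2, rate_2 ≈ 4) verify.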
Conditional HINT invertibility test with permute=true, logdet=true, reverse=true Conditional HINT gradient test for input with permute=true, logdet=true, reverse=true 307.27734; 488.61847 146.61328; 291.68616 42.234375; 158.2927 6.8984375; 85.94821 28.261719; 46.015606 741.5; 433.88895 527.70703; 281.61816 366.84375; 169.97266 261.1797; 103.682816 187.65625; 61.658752 69.65625; 588.7802 83.79297; 442.95624 73.015625; 348.38373 82.82031; 254.29916 75.77734; 193.91824 Conditional HINT gradient test for weights with permute=true, logdet=true, reverse=true 2928.5234; 4067.4312 178.61719; 748.071 403.53125; 118.80435 112.69531; 29.668137 32.21875; 38.962975 5958.547; 5522.8867 575.2578; 357.4278 13.953125; 122.86814 152.39844; 206.85594 214.61719; 241.84595 4288.828; 3438.421 1768.8359; 1343.6323 822.8281; 610.2263 348.25; 241.9491 162.8125; 109.66205 Conditional HINT invertibility test with permute=true, logdet=true, reverse=false Conditional HINT gradient test for input with permute=true, logdet=true, reverse=false 7.3481445; 10.404114 4.257324; 6.7021 2.3149414; 4.2707615 1.1411133; 2.7057695 0.45361328; 1.7053382 8.515137; 8.036439 5.4365234; 5.053566 3.4726562; 3.16629 2.2426758; 1.9975829 1.4467773; 1.250703 15.729492; 10.240671 11.030273; 6.639217 7.8408203; 4.3279753 5.631836; 2.8215597 4.0961914; 1.8479705 Conditional HINT gradient test for weights with permute=true, logdet=true, reverse=false 34.58008; 30.37849 9.578125; 7.477331 2.46875; 1.4183531 0.74609375; 0.22089529 0.20800781; 0.054591417 26.871582; 24.506798 7.131836; 5.9494443 1.9804688; 1.3892729 0.71191406; 0.41631612 0.1899414; 0.042142436 3.666504; 31.839502 9.138672; 4.9478273 7.40625; 0.3630004 4.9404297; 1.4188049 3.2104492; 1.4496368 Conditional HINT invertibility test with permute=false, logdet=true, reverse=true Conditional HINT gradient test for input with permute=false, logdet=true, reverse=true 671.4375; 2301.745 342.8125; 1647.0586 130.96875; 1174.3657 62.140625; 772.5769 169.01562; 498.75842 2738.8594; 1906.7361 1887.0156; 1221.317 1288.3906; 755.8317 873.90625; 447.85913 588.9844; 248.14665 967.40625; 1190.7986 601.9219; 780.6358 290.26562; 433.2368 114.46875; 228.84567 43.21875; 134.72029 Conditional HINT gradient test for weights with permute=false, logdet=true, reverse=true 12232.055; 14150.607 2496.0938; 3455.3704 396.6797; 876.318 18.203125; 221.61601 68.48047; 51.4291 5327.5273; 11782.888 388.21484; 2839.4653 1156.5586; 457.2815 973.9336; 167.01355 591.3125; 187.85248 29209.496; 21125.803 9289.711; 5247.8643 3558.4883; 1537.5648 1679.7461; 669.28436 914.2578; 409.02695 Conditional HINT invertibility test with permute=false, logdet=true, reverse=false Conditional HINT gradient test for input with permute=false, logdet=true, reverse=false 13.243652; 13.815769 8.580078; 9.037771 5.5805664; 5.946721 3.6049805; 3.8979044 2.3364258; 2.570765 5.2929688; 12.841813 2.133789; 8.172865 0.3955078; 5.2267685 0.49853516; 3.3664734 0.95751953; 2.1344874 17.960938; 12.322996 12.469727; 7.9593744 8.8203125; 5.212031 6.3085938; 3.4219685 4.5634766; 2.2541761 Conditional HINT gradient test for weights with permute=false, logdet=true, reverse=false 31.389648; 40.339943 5.359375; 9.834523 0.25097656; 2.4885507 0.46826172; 0.65052533 0.42089844; 0.13849509 37.478516; 34.603603 9.819824; 8.382368 3.015625; 2.296897 1.3291016; 0.9697375 0.82177734; 0.6420953 36.82715; 43.253754 7.169922; 10.383224 0.29248047; 1.8991314 0.8955078; 0.09218234 0.9536133; 0.5519506 Conditional HINT invertibility test with permute=true, logdet=false, 
reverse=true Conditional HINT gradient test for input with permute=true, logdet=false, reverse=true 1141.75; 1629.5151 837.0469; 1227.259 530.2969; 842.4666 332.53125; 582.267 175.25; 375.0386 1244.4844; 992.2443 925.0469; 723.2548 596.71875; 435.2851 377.42188; 248.27495 230.92188; 127.60434 1052.5781; 2295.5361 509.75; 1504.1165 211.29688; 1006.79004 37.578125; 673.97266 81.3125; 427.80313 Conditional HINT gradient test for weights with permute=true, logdet=false, reverse=true 2756.3594; 15030.479 1369.8906; 4767.169 1517.8594; 1550.6704 1230.6562; 303.60864 710.4844; 56.64807 35461.72; 12917.398 16986.375; 5714.215 7449.203; 1813.123 3405.9531; 587.9131 1552.7188; 143.69873 5377.125; 10481.573 6956.9375; 972.4116 5231.75; 1267.0754 3629.5312; 1647.194 2615.4531; 1624.2844 Conditional HINT invertibility test with permute=true, logdet=false, reverse=false Conditional HINT gradient test for input with permute=true, logdet=false, reverse=false 19.44165; 26.46093 11.451416; 17.066841 6.5043945; 10.996734 3.4711914; 7.065063 1.6237793; 4.4988766 17.555908; 20.637024 10.606445; 13.071338 6.2470703; 8.218985 3.621338; 5.198869 2.0234375; 3.2854624 39.128418; 24.516361 27.639893; 15.950246 19.729736; 10.378019 14.177246; 6.695873 10.312256; 4.3271565 Conditional HINT gradient test for weights with permute=true, logdet=false, reverse=false 97.16406; 71.291405 32.059326; 19.122997 10.667969; 4.199805 4.022217; 0.7881348 1.6799316; 0.06289065 70.77954; 70.50941 17.199219; 17.06415 3.0021973; 2.934663 0.17651367; 0.2102808 1.192627; 1.2095106 27.424805; 82.211914 11.284912; 16.108643 12.822266; 0.8745117 9.457031; 2.6086426 6.5964355; 3.1722412 Conditional HINT invertibility test with permute=false, logdet=false, reverse=true Conditional HINT gradient test for input with permute=false, logdet=false, reverse=true 1335.2812; 4597.335 680.8125; 3290.4556 258.9375; 2346.652 126.375; 1543.7966 339.5; 996.6373 5473.9375; 3809.0986 3771.625; 2439.7537 2575.0625; 1509.5656 1746.625; 894.2274 1177.3125; 495.3944 1926.6875; 2375.9895 1197.8125; 1557.254 575.84375; 863.39703 225.40625; 455.44888 83.84375; 267.87787 Conditional HINT gradient test for weights with permute=false, logdet=false, reverse=true 24451.023; 28306.945 4982.9062; 6910.8667 787.46875; 1751.449 39.523438; 442.4667 138.57031; 102.42476 10612.0; 23575.941 798.4375; 5683.5327 2324.211; 916.7742 1952.5312; 332.0387 1184.0859; 373.83966 58372.695; 42259.61 18552.703; 10496.159 7102.7266; 3074.4546 3353.2812; 1339.1453 1825.9375; 818.8695 Conditional HINT invertibility test with permute=false, logdet=false, reverse=false Conditional HINT gradient test for input with permute=false, logdet=false, reverse=false 34.546875; 32.605423 22.800781; 21.247622 15.181641; 13.939112 10.125244; 9.131222 6.855713; 6.0604944 15.369629; 31.016878 7.3112793; 19.829079 2.7495117; 12.763752 0.26757812; 8.27897 1.0844727; 5.3246408 43.323975; 28.656773 30.0896; 18.355837 21.260498; 11.873489 15.2561035; 7.746496 11.076172; 5.068486 Conditional HINT gradient test for weights with permute=false, logdet=false, reverse=false 87.03931; 112.350174 15.570557; 28.225992 1.076416; 7.404134 1.2263184; 1.9375405 1.1340332; 0.44789624 108.76904; 95.875275 31.363037; 24.916151 9.857422; 6.633979 4.090332; 2.4786105 2.2563477; 1.4504869 97.47339; 110.38462 19.168457; 25.624071 0.8503418; 4.078149 2.0083008; 0.39439714 2.3134766; 1.5065248 Conditional HINT jacobian test with permute=true, logdet=true 19.220694; 6.5345316 9.372641; 2.187312 4.6465373; 0.7686729 2.3158216; 0.28033975 
1.1558608; 0.09980874 Conditional HINT jacobian test with permute=false, logdet=true 17.469196; 3.857861 8.326562; 1.2412885 4.069782; 0.41852033 2.0103474; 0.1380963 0.9997722; 0.046031166 Conditional HINT jacobian test with permute=true, logdet=false 18.749294; 5.9120197 9.167557; 1.9367977 4.5475388; 0.6332978 2.2663913; 0.20675974 1.1311547; 0.072152674 Conditional HINT jacobian test with permute=false, logdet=false 13.85919; 3.401138 6.6301928; 1.1358013 3.2432723; 0.39246503 1.6047163; 0.13719784 0.79776454; 0.048769027 WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_layer_hint.jl:53 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_res_block.jl:21. Gradient test convolutions 0.10099465; 0.01145719 0.05337113; 0.0028547868 0.027392566; 0.0007203929 0.013874173; 0.0001823064 0.0069827437; 4.549604e-5 Gradient test convolutions 0.15976954; 0.043523204 0.08993995; 0.011706417 0.04776448; 0.0030587045 0.024626613; 0.00078497967 0.012504876; 0.00020092027 Jacobian test 17.961573; 4.492205 8.714661; 1.509011 4.2998977; 0.52324 2.1360626; 0.18638109 1.0642132; 0.06844468 WARNING: Method definition loss(Any, Any, Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_basic.jl:164 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_hyperbolic_layer.jl:36. Invertibility test hyperbolic layer with action=1 Gradient test hyperbolic layer input with action=1 503.43863; 503.43863 125.85779; 125.85779 31.46389; 31.46389 7.8660526; 7.8660526 1.9665724; 1.9665724 0.4916737; 0.4916737 0.12291897; 0.12291897 0.030729882; 0.030729882 0.0076825228; 0.0076825228 0.0019206405; 0.0019206405 Gradient test hyperbolic layer weights with action=1 726.5061; 726.5061 109.59151; 109.59151 23.338224; 23.338224 5.63111; 5.63111 1.4014579; 1.4014579 0.35138813; 0.35138813 0.08822897; 0.08822897 0.022032365; 0.022032365 0.0055028433; 0.0055028433 0.0013750521; 0.0013750521 Invertibility test hyperbolic layer with action=-1 Gradient test hyperbolic layer input with action=-1 558.8499; 558.8499 139.72687; 139.72687 34.93243; 34.93243 8.733023; 8.733023 2.1832714; 2.1832714 0.5458219; 0.5458219 0.1364557; 0.1364557 0.034113787; 0.034113787 0.008528486; 0.008528486 0.002132124; 0.002132124 Gradient test hyperbolic layer weights with action=-1 759.62683; 759.62683 117.76739; 117.76739 25.491236; 25.491236 6.17219; 6.17219 1.540301; 1.540301 0.38569427; 0.38569427 0.096541874; 0.096541874 0.024178293; 0.024178293 0.0060452614; 0.0060452614 0.001511471; 0.001511471 Invertibility test hyperbolic layer with action=0 Gradient test hyperbolic layer input with action=0 527.9051; 527.9051 131.9753; 131.9753 32.99281; 32.99281 8.247385; 8.247385 2.0616956; 2.0616956 0.51540357; 0.51540357 0.12885086; 0.12885086 0.03221271; 0.03221271 0.008053166; 0.008053166 0.0020132854; 0.0020132854 Gradient test hyperbolic layer weights with action=0 727.3004; 727.3004 111.424866; 111.424866 23.552282; 23.552282 5.62395; 5.62395 1.3905133; 1.3905133 0.34624904; 0.34624904 0.08655291; 0.08655291 0.021625923; 0.021625923 0.0054067336; 0.0054067336 0.0013516665; 0.0013516665 Jacobian test 31.395208; 0.67913556 15.749694; 0.20344266 7.88853; 0.06486708 3.9479053; 0.021687996 1.9748187; 0.007440424 Jacobian test 32.853966; 0.4753818 16.435541; 0.15083764 8.21972; 0.050856747 4.11039; 
0.017604033 2.0553403; 0.0067258854
Jacobian test 32.241665; 0.7035909 16.149662; 0.21853217 8.082465; 0.07067407 4.0431204; 0.024140524 2.0220902; 0.008537666
WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_res_block.jl:21 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_actnorm.jl:112.
Gradient test actnorm 593.0364; 31.212463 304.3213; 7.803131 154.11157; 1.9506378 77.54346; 0.487648 38.89331; 0.122241974 19.477295; 0.030481339
Gradient test actnorm 2.277193; 6.986334 0.61058044; 1.74399 0.7415428; 0.43574238 0.47982788; 0.10881472 0.26706696; 0.027254343 0.1403656; 0.0067950487
Gradient test actnorm reverse 581.8613; 30.624023 298.5869; 7.6557617 151.20776; 1.9135742 76.082275; 0.47839355 38.16089; 0.1194458 19.110352; 0.029815674
Gradient test actnorm reverse 10.052361; 0.118299484 4.998604; 0.031573296 2.491684; 0.008168697 1.2439346; 0.002177 0.62132263; 0.00044381618 0.31058502; 0.00014561415
Jacobian test 8.177256; 0.5802178 4.101183; 0.1450545 2.0546827; 0.036263626 1.0284821; 0.00906591 0.514541; 0.0022664294
WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_actnorm.jl:112 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_layer_affine.jl:45.
Gradient test affine layer 0.9472656; 0.034610093 0.48242188; 0.008515984 0.24316406; 0.002304867 0.12207031; 0.0006641522 0.061523438; 0.00015620515
Gradient test affine layer 216.86084; 16.338135 112.51489; 4.0845947 57.278564; 1.0211792 28.894287; 0.25558472 14.510986; 0.063949585
Jacobian test 1.2265664; 0.08353404 0.602034; 0.020883504 0.29830196; 0.0052208737 0.1484848; 0.0013052188 0.07407744; 0.00032630298
Test Summary: | Pass Total Time
Layers | 576 576 3m24.3s
  Test test_layers/test_residual_block.jl | 18 18 3.9s
  Test test_layers/test_flux_block.jl | 4 4 7.1s
  Test test_layers/test_resnet.jl | 4 4 34.3s
  Test test_layers/test_layer_conv1x1.jl | 29 29 5.3s
  Test test_layers/test_coupling_layer_basic.jl | 29 29 4.5s
  Test test_layers/test_coupling_layer_irim.jl | 11 11 2.2s
  Test test_layers/test_coupling_layer_glow.jl | 17 17 48.9s
  Test test_layers/test_coupling_layer_hint.jl | 163 163 16.0s
  Test test_layers/test_conditional_layer_glow.jl | 8 8 1.2s
  Test test_layers/test_conditional_layer_hint.jl | 84 84 31.4s
  Test test_layers/test_conditional_res_block.jl | 7 7 13.2s
  Test test_layers/test_hyperbolic_layer.jl | 159 159 16.0s
  Test test_layers/test_actnorm.jl | 32 32 8.3s
  Test test_layers/test_layer_affine.jl | 11 11 12.0s
Gradient test loop unrolling 847.2373; 46.51297 435.24365; 11.631485 220.5293; 2.9082718 110.9917; 0.7270851 55.678223; 0.18116951 27.884766; 0.044930458
Gradient test loop unrolling 240.05078; 20.002798 114.87012; 4.8461256 56.203613; 1.1916176 27.797363; 0.29136544 13.824707; 0.07170811
WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_layer_affine.jl:45 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_generator.jl:72.
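The "Method definition ... overwritten" warnings come from the test runner including many test files into the same Main module; several files define helpers with identical names and signatures (for example loss and mean), so each later include replaces the earlier method. A small, self-contained sketch that reproduces the same warning when run non-interactively (temporary file names, purely illustrative):

    # Two included files define the same method in Main; the second include overwrites the
    # first, and non-interactive Julia prints "Method definition ... overwritten".
    file_a = tempname() * ".jl"
    file_b = tempname() * ".jl"
    write(file_a, "loss(x, y, θ) = sum(abs2, x .- y)\n")
    write(file_b, "loss(x, y, θ) = sum(abs, x .- y)\n")
    include(file_a)
    include(file_b)   # WARNING: Method definition loss(Any, Any, Any) in module Main at ... overwritten at ...

In this test suite the warnings are harmless noise; they disappear if each test file uses distinct helper names or keeps its helpers in its own module.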
Gradient test loop unrolling 12.51416; 1.4956894 6.628414; 0.37651062 3.4077988; 0.09466362 1.7275848; 0.023646355 Gradient test loop unrolling 1.6518745; 0.1902467 0.6915779; 0.03923601 0.35837936; 0.0070275962 0.18094254; 0.0017609447 WARNING: Method definition mean(Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_layer_hint.jl:7 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:9. ┌ Warning: Assignment to `X` in soft scope is ambiguous because a global variable by the same name exists: `X` will be treated as a new local. Disambiguate by using `local X` to suppress this warning or `global X` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:36 ┌ Warning: Assignment to `Y` in soft scope is ambiguous because a global variable by the same name exists: `Y` will be treated as a new local. Disambiguate by using `local Y` to suppress this warning or `global Y` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:40 ┌ Warning: Assignment to `X_` in soft scope is ambiguous because a global variable by the same name exists: `X_` will be treated as a new local. Disambiguate by using `local X_` to suppress this warning or `global X_` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:44 ┌ Warning: Assignment to `X0` in soft scope is ambiguous because a global variable by the same name exists: `X0` will be treated as a new local. Disambiguate by using `local X0` to suppress this warning or `global X0` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:89 ┌ Warning: Assignment to `dX` in soft scope is ambiguous because a global variable by the same name exists: `dX` will be treated as a new local. Disambiguate by using `local dX` to suppress this warning or `global dX` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:90 ┌ Warning: Assignment to `f0` in soft scope is ambiguous because a global variable by the same name exists: `f0` will be treated as a new local. Disambiguate by using `local f0` to suppress this warning or `global f0` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:92 ┌ Warning: Assignment to `ΔX` in soft scope is ambiguous because a global variable by the same name exists: `ΔX` will be treated as a new local. Disambiguate by using `local ΔX` to suppress this warning or `global ΔX` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:92 ┌ Warning: Assignment to `h` in soft scope is ambiguous because a global variable by the same name exists: `h` will be treated as a new local. Disambiguate by using `local h` to suppress this warning or `global h` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:93 ┌ Warning: Assignment to `maxiter` in soft scope is ambiguous because a global variable by the same name exists: `maxiter` will be treated as a new local. Disambiguate by using `local maxiter` to suppress this warning or `global maxiter` to assign to the existing global variable. 
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:94 ┌ Warning: Assignment to `err1` in soft scope is ambiguous because a global variable by the same name exists: `err1` will be treated as a new local. Disambiguate by using `local err1` to suppress this warning or `global err1` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:95 ┌ Warning: Assignment to `err2` in soft scope is ambiguous because a global variable by the same name exists: `err2` will be treated as a new local. Disambiguate by using `local err2` to suppress this warning or `global err2` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:96 ┌ Warning: Assignment to `num_attempts` in soft scope is ambiguous because a global variable by the same name exists: `num_attempts` will be treated as a new local. Disambiguate by using `local num_attempts` to suppress this warning or `global num_attempts` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:110 ┌ Warning: Assignment to `results_1` in soft scope is ambiguous because a global variable by the same name exists: `results_1` will be treated as a new local. Disambiguate by using `local results_1` to suppress this warning or `global results_1` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:111 ┌ Warning: Assignment to `results_2` in soft scope is ambiguous because a global variable by the same name exists: `results_2` will be treated as a new local. Disambiguate by using `local results_2` to suppress this warning or `global results_2` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:112 ┌ Warning: Assignment to `dW` in soft scope is ambiguous because a global variable by the same name exists: `dW` will be treated as a new local. Disambiguate by using `local dW` to suppress this warning or `global dW` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:123 ┌ Warning: Assignment to `dv` in soft scope is ambiguous because a global variable by the same name exists: `dv` will be treated as a new local. Disambiguate by using `local dv` to suppress this warning or `global dv` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:124 ┌ Warning: Assignment to `err3` in soft scope is ambiguous because a global variable by the same name exists: `err3` will be treated as a new local. Disambiguate by using `local err3` to suppress this warning or `global err3` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:129 ┌ Warning: Assignment to `err4` in soft scope is ambiguous because a global variable by the same name exists: `err4` will be treated as a new local. Disambiguate by using `local err4` to suppress this warning or `global err4` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:130 ┌ Warning: Assignment to `factor1` in soft scope is ambiguous because a global variable by the same name exists: `factor1` will be treated as a new local. 
Disambiguate by using `local factor1` to suppress this warning or `global factor1` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:144 ┌ Warning: Assignment to `factor2` in soft scope is ambiguous because a global variable by the same name exists: `factor2` will be treated as a new local. Disambiguate by using `local factor2` to suppress this warning or `global factor2` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:145 ┌ Warning: Assignment to `θ` in soft scope is ambiguous because a global variable by the same name exists: `θ` will be treated as a new local. Disambiguate by using `local θ` to suppress this warning or `global θ` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:160 ┌ Warning: Assignment to `θ0` in soft scope is ambiguous because a global variable by the same name exists: `θ0` will be treated as a new local. Disambiguate by using `local θ0` to suppress this warning or `global θ0` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:162 ┌ Warning: Assignment to `dθ` in soft scope is ambiguous because a global variable by the same name exists: `dθ` will be treated as a new local. Disambiguate by using `local dθ` to suppress this warning or `global dθ` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:166 ┌ Warning: Assignment to `dY` in soft scope is ambiguous because a global variable by the same name exists: `dY` will be treated as a new local. Disambiguate by using `local dY` to suppress this warning or `global dY` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:172 ┌ Warning: Assignment to `err5` in soft scope is ambiguous because a global variable by the same name exists: `err5` will be treated as a new local. Disambiguate by using `local err5` to suppress this warning or `global err5` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:178 ┌ Warning: Assignment to `err6` in soft scope is ambiguous because a global variable by the same name exists: `err6` will be treated as a new local. Disambiguate by using `local err6` to suppress this warning or `global err6` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:179 ┌ Warning: Assignment to `dY_` in soft scope is ambiguous because a global variable by the same name exists: `dY_` will be treated as a new local. Disambiguate by using `local dY_` to suppress this warning or `global dY_` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:196 ┌ Warning: Assignment to `dX_` in soft scope is ambiguous because a global variable by the same name exists: `dX_` will be treated as a new local. Disambiguate by using `local dX_` to suppress this warning or `global dX_` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:197 ┌ Warning: Assignment to `dθ_` in soft scope is ambiguous because a global variable by the same name exists: `dθ_` will be treated as a new local. 
Disambiguate by using `local dθ_` to suppress this warning or `global dθ_` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:197 ┌ Warning: Assignment to `a` in soft scope is ambiguous because a global variable by the same name exists: `a` will be treated as a new local. Disambiguate by using `local a` to suppress this warning or `global a` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:198 ┌ Warning: Assignment to `b` in soft scope is ambiguous because a global variable by the same name exists: `b` will be treated as a new local. Disambiguate by using `local b` to suppress this warning or `global b` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_glow.jl:199 Testing Glow with dimensions=(32, 32) logdet=true and split_scales=true Gradient test glow: input 17.462402; 0.6315689 8.974365; 0.5039482 4.5429688; 0.069664955 2.2896729; 0.010792017 Gradient test glow: input 9.431641; 0.5487257 7.60437; 0.49803746 6.0596924; 0.37462634 4.859619; 0.3115665 Gradient test glow: input 18.234863; 1.2553343 14.768066; 0.8240926 11.884644; 0.589084 9.658569; 0.32041216 Gradient test glow: input 15.759888; 0.82824194 12.810425; 0.46007967 10.392212; 0.22419113 8.379517; 0.1136058 Jacobian test 23.693167; 19.549408 11.132732; 6.0280585 5.544935; 1.9736346 2.7734976; 0.67495644 1.3899198; 0.24006963 Testing Glow with dimensions=(32, 32, 32) logdet=true and split_scales=true Gradient test glow: input 801.7539; 39.55957 410.08594; 8.071167 207.14844; 2.1161041 104.0625; 0.3764038 Gradient test glow: input 307.95703; 12.190819 248.6289; 7.4893866 200.20703; 4.6875877 160.9375; 2.9782043 Gradient test glow: input 335.3203; 13.993829 270.82812; 8.623206 218.13672; 5.4243526 175.32812; 3.5207198 Gradient test glow: input 495.6914; 24.064589 399.92578; 15.879027 322.1172; 10.526671 259.16406; 6.9510245 Jacobian test 97.04776; 52.07937 52.95613; 18.514 27.792141; 6.5133176 14.255469; 2.2976208 7.2207384; 0.81605494 Testing Glow with dimensions=(32, 32) logdet=true and split_scales=false Gradient test glow: input 106.02612; 14.731575 56.755432; 4.098301 29.432861; 1.0271149 14.960144; 0.23535919 Gradient test glow: input 0.33947754; 0.89753884 0.09631348; 0.54276246 0.03149414; 0.38865334 0.15039062; 0.436118 Gradient test glow: input 2.4110107; 1.2223033 1.6799316; 0.7289657 1.24646; 0.48568726 0.87927246; 0.27065432 Gradient test glow: input 22.810669; 0.9745245 19.184448; 0.15629244 15.61438; 0.39185524 12.599609; 0.42158937 Jacobian test 79.69341; 108.2786 48.253628; 37.41695 26.061678; 12.343957 13.430035; 3.993826 6.819765; 1.2973368 Testing Glow with dimensions=(32, 32, 32) logdet=true and split_scales=false Gradient test glow: input 4762.2812; 581.99756 2528.6523; 149.86743 1302.2051; 38.06714 660.65625; 9.507996 Gradient test glow: input 286.5039; 79.78258 239.22461; 53.804592 199.01172; 35.411633 165.02734; 22.511337 Gradient test glow: input 798.77344; 172.99612 662.4121; 115.003555 547.9199; 74.01259 451.42383; 46.12219 Gradient test glow: input 104.71289; 66.42775 96.43945; 40.47306 84.677734; 24.852276 72.73047; 14.893539 Jacobian test 211.40178; 133.2839 118.574005; 42.33533 62.53973; 13.445034 32.078316; 4.391089 16.240839; 1.4755403 Testing Glow with dimensions=(32, 32) logdet=false and split_scales=true Gradient test glow: input 0.08188984; 0.011012763 0.04361838; 0.0026302375 0.022481889; 
0.00066454895 0.011409521; 0.00016988628 Gradient test glow: input 0.0026124418; 0.0024918588 0.0023212135; 0.0017622274 0.002096206; 0.0011705466 0.0018539131; 0.0007594888 Gradient test glow: input 0.022517473; 0.005002311 0.01865989; 0.003355937 0.015432358; 0.0021803037 0.012711167; 0.0013789618 Gradient test glow: input 0.008470774; 0.0013016039 0.006903976; 0.00091392547 0.005767107; 0.0004872149 0.0047272444; 0.00027621258 Jacobian test 23.693167; 19.549408 11.132732; 6.0280585 5.544935; 1.9736346 2.7734976; 0.67495644 1.3899198; 0.24006963 Testing Glow with dimensions=(32, 32, 32) logdet=false and split_scales=true Gradient test glow: input 0.081989855; 0.011007749 0.043799102; 0.0028467625 0.022620797; 0.0007254835 0.011491537; 0.00018306635 Gradient test glow: input 0.006269574; 0.00091650954 0.0051719546; 0.0005769129 0.004232049; 0.00036704497 0.0034452677; 0.00023400738 Gradient test glow: input 0.009379357; 0.0014600648 0.0077374876; 0.0009340505 0.0063405037; 0.00059672655 0.0051641166; 0.0003856673 Gradient test glow: input 0.01030612; 0.0013398599 0.00845021; 0.0008665747 0.0068983138; 0.0005551133 0.0056078434; 0.00035489854 Jacobian test 97.04776; 52.07937 52.95613; 18.514 27.792141; 6.5133176 14.255469; 2.2976208 7.2207384; 0.81605494 Testing Glow with dimensions=(32, 32) logdet=false and split_scales=false Gradient test glow: input 122.29492; 16.000671 65.25; 3.910923 33.677795; 1.0943298 17.125122; 0.29644203 Gradient test glow: input 11.555115; 1.2576394 9.153015; 1.097188 7.4520874; 0.74807525 6.0687866; 0.49134374 Gradient test glow: input 15.9210205; 3.4271924 12.4088745; 2.4138117 9.481628; 1.4855781 7.336548; 0.9397075 Gradient test glow: input 29.397339; 5.2088223 23.753296; 3.9316347 18.910522; 3.237422 15.342651; 2.375705 Jacobian test 79.69341; 108.2786 48.253628; 37.41695 26.061678; 12.343957 13.430035; 3.993826 6.819765; 1.2973368 Testing Glow with dimensions=(32, 32, 32) logdet=false and split_scales=false Gradient test glow: input 5365.117; 621.3506 2838.6074; 156.06006 1458.2383; 38.8136 738.79297; 9.572998 Gradient test glow: input 810.18164; 107.831604 663.9844; 70.42627 543.27344; 44.255104 441.48242; 28.540394 Gradient test glow: input 929.46875; 161.38318 763.1035; 109.578094 626.34375; 71.80149 512.8008; 45.71544 Gradient test glow: input 608.6621; 41.40074 496.03906; 24.011246 402.33984; 13.70039 324.81836; 8.01384 Jacobian test 211.40178; 133.2839 118.574005; 42.33533 62.53973; 13.445034 32.078316; 4.391089 16.240839; 1.4755403 ┌ Warning: Assignment to `N` in soft scope is ambiguous because a global variable by the same name exists: `N` will be treated as a new local. Disambiguate by using `local N` to suppress this warning or `global N` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:20 ┌ Warning: Assignment to `X` in soft scope is ambiguous because a global variable by the same name exists: `X` will be treated as a new local. Disambiguate by using `local X` to suppress this warning or `global X` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:24 ┌ Warning: Assignment to `Y` in soft scope is ambiguous because a global variable by the same name exists: `Y` will be treated as a new local. Disambiguate by using `local Y` to suppress this warning or `global Y` to assign to the existing global variable. 
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:27 ┌ Warning: Assignment to `X_` in soft scope is ambiguous because a global variable by the same name exists: `X_` will be treated as a new local. Disambiguate by using `local X_` to suppress this warning or `global X_` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:28 ┌ Warning: Assignment to `X0` in soft scope is ambiguous because a global variable by the same name exists: `X0` will be treated as a new local. Disambiguate by using `local X0` to suppress this warning or `global X0` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:65 ┌ Warning: Assignment to `dX` in soft scope is ambiguous because a global variable by the same name exists: `dX` will be treated as a new local. Disambiguate by using `local dX` to suppress this warning or `global dX` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:66 ┌ Warning: Assignment to `f0` in soft scope is ambiguous because a global variable by the same name exists: `f0` will be treated as a new local. Disambiguate by using `local f0` to suppress this warning or `global f0` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:68 ┌ Warning: Assignment to `ΔX` in soft scope is ambiguous because a global variable by the same name exists: `ΔX` will be treated as a new local. Disambiguate by using `local ΔX` to suppress this warning or `global ΔX` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:68 ┌ Warning: Assignment to `h` in soft scope is ambiguous because a global variable by the same name exists: `h` will be treated as a new local. Disambiguate by using `local h` to suppress this warning or `global h` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:69 ┌ Warning: Assignment to `maxiter` in soft scope is ambiguous because a global variable by the same name exists: `maxiter` will be treated as a new local. Disambiguate by using `local maxiter` to suppress this warning or `global maxiter` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:70 ┌ Warning: Assignment to `err1` in soft scope is ambiguous because a global variable by the same name exists: `err1` will be treated as a new local. Disambiguate by using `local err1` to suppress this warning or `global err1` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:71 ┌ Warning: Assignment to `err2` in soft scope is ambiguous because a global variable by the same name exists: `err2` will be treated as a new local. Disambiguate by using `local err2` to suppress this warning or `global err2` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:72 ┌ Warning: Assignment to `dW` in soft scope is ambiguous because a global variable by the same name exists: `dW` will be treated as a new local. Disambiguate by using `local dW` to suppress this warning or `global dW` to assign to the existing global variable. 
└ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:94 ┌ Warning: Assignment to `err3` in soft scope is ambiguous because a global variable by the same name exists: `err3` will be treated as a new local. Disambiguate by using `local err3` to suppress this warning or `global err3` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:99 ┌ Warning: Assignment to `err4` in soft scope is ambiguous because a global variable by the same name exists: `err4` will be treated as a new local. Disambiguate by using `local err4` to suppress this warning or `global err4` to assign to the existing global variable. └ @ ~/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_dense_glow.jl:100 Testing Dense Glow with dimensions nx=32 Gradient test glow: input 0.16273117; 0.072772585 0.10050583; 0.02655451 0.054912567; 0.0064851455 0.029319763; 0.0010421574 Gradient test glow: parameters 0.08470154; 0.04437387 0.046499252; 0.026335418 0.023281097; 0.01319918 0.011613846; 0.0065728873 Testing Dense Glow with dimensions nx=2 Gradient test glow: input 0.011595726; 0.003871609 0.007134676; 0.0017264285 0.0039999485; 0.00043376652 0.002108574; 0.00010885601 Gradient test glow: parameters 0.04665613; 0.04665613 0.01659131; 0.01659131 0.0050587654; 0.0050587654 7.8201294e-5; 7.8201294e-5 (norm(X_curr - X_curr_), norm(X_curr_)) = (6.134001f-5, 64.07617f0) (norm(X_curr - X_curr_), norm(X_curr_)) = (8.808811f-5, 64.07617f0) WARNING: Method definition loss(Any, Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_layer_glow.jl:42 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_hyperbolic_network.jl:32. 
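The long run of "Assignment to `X` in soft scope is ambiguous" warnings from test_glow.jl and test_dense_glow.jl is Julia's non-interactive soft-scope rule: assigning to an existing global from inside a top-level loop in a script is ambiguous, so the variable is treated as a new local and a warning is printed. A minimal reproduction together with the fix the warning itself suggests (run as a script; the REPL applies interactive scope rules and stays silent):

    # Run as a script (e.g. `julia scope_demo.jl`); the REPL does not warn.
    err1 = 0.0f0                  # existing global
    for j = 1:5
        err1 = Float32(j)         # Warning: Assignment to `err1` in soft scope is ambiguous ...
    end                           # here `err1` becomes a new local; the global keeps its value

    err2 = 0.0f0
    for j = 1:5
        global err2 = Float32(j)  # explicit: assign to the existing global, no warning
    end

Each variable assigned this way in the test files (X, Y, dX, err1, ...) triggers exactly one such warning; moving the loops into a function or a let block avoids the ambiguity altogether.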
Gradient test hyperbolic network 2005.5723; 221.49847 1058.3438; 55.19162 542.9883; 13.779404 274.91406; 3.46978 138.31445; 0.8774681 69.375; 0.22096062 rate_1 = sum(err1[1:end - 1] ./ err1[2:end]) / (maxiter - 1) = 1.9601128f0 rate_2 = sum(err2[1:end - 1] ./ err2[2:end]) / (maxiter - 1) = 3.983071f0 Gradient test invertible layer for layer 1 176.32422; 5.674164 89.5918; 1.4073944 45.132812; 0.36678314 22.65625; 0.09354782 11.349609; 0.025289536 5.680664; 0.0067853928 local rate_1 = sum(err3[1:end - 1] ./ err3[2:end]) / (maxiter - 1) = 1.987875f0 local rate_2 = sum(err4[1:end - 1] ./ err4[2:end]) / (maxiter - 1) = 3.8431497f0 Gradient test invertible layer for layer 2 798.8994; 33.88733 408.29297; 8.100403 206.21582; 1.9808655 103.61133; 0.48701477 51.921875; 0.12729645 25.989258; 0.03532791 local rate_1 = sum(err3[1:end - 1] ./ err3[2:end]) / (maxiter - 1) = 1.9840477f0 local rate_2 = sum(err4[1:end - 1] ./ err4[2:end]) / (maxiter - 1) = 3.9538429f0 Gradient test invertible layer for layer 3 1071.1426; 46.096558 547.7334; 10.886169 276.66406; 2.6457214 139.00586; 0.6490326 69.666016; 0.16143036 34.86914; 0.044582367 local rate_1 = sum(err3[1:end - 1] ./ err3[2:end]) / (maxiter - 1) = 1.983784f0 local rate_2 = sum(err4[1:end - 1] ./ err4[2:end]) / (maxiter - 1) = 4.0133824f0 Gradient test invertible layer for layer 4 425.57227; 14.415924 216.7539; 3.2401886 109.22266; 0.7743912 54.807617; 0.19090652 27.445312; 0.053949356 13.731445; 0.018185616 local rate_1 = sum(err3[1:end - 1] ./ err3[2:end]) / (maxiter - 1) = 1.9872872f0 local rate_2 = sum(err4[1:end - 1] ./ err4[2:end]) / (maxiter - 1) = 3.838977f0 Gradient test invertible layer for layer 5 100.18945; 5.730301 51.585938; 1.3739395 26.144531; 0.33540726 13.15625; 0.08371925 6.5976562; 0.022328377 3.303711; 0.006281376 local rate_1 = sum(err3[1:end - 1] ./ err3[2:end]) / (maxiter - 1) = 1.9787296f0 local rate_2 = sum(err4[1:end - 1] ./ err4[2:end]) / (maxiter - 1) = 3.915505f0 Jacobian test 51.16396; 5.4137487 25.800016; 1.4633335 12.9542885; 0.41886997 6.490106; 0.1324273 3.2485218; 0.045187637 Invertibility test multiscale HINT network WARNING: Method definition loss(Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_coupling_layer_hint.jl:33 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_multiscale_hint_network.jl:37. Gradient test multiscale HINT net: input 8.808594; 37.73768 13.1015625; 10.171574 8.958984; 2.6775837 5.1191406; 0.6991434 2.7207031; 0.18843889 1.40625; 0.04832101 Jacobian test 49.503975; 30.378218 23.102898; 10.54348 11.242197; 3.6620078 5.5794253; 1.2730979 2.7834933; 0.44351527 WARNING: Method definition inv_test(Any, Any, Any, Any, Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_layer_hint.jl:22 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_multiscale_conditional_hint_network.jl:17. WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_generator.jl:72 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_multiscale_conditional_hint_network.jl:49. 
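The rate_1 and rate_2 lines above average the ratios of consecutive errors; with the step size halved each iteration, a correct gradient gives rate_1 ≈ 2 for the first-order column and rate_2 ≈ 4 for the second-order column. The same computation applied to the "layer 1" error columns printed above reproduces the reported values:

    # Error columns copied from "Gradient test invertible layer for layer 1" above.
    err3 = Float32[176.32422, 89.5918, 45.132812, 22.65625, 11.349609, 5.680664]
    err4 = Float32[5.674164, 1.4073944, 0.36678314, 0.09354782, 0.025289536, 0.0067853928]
    maxiter = length(err3)

    rate_1 = sum(err3[1:end-1] ./ err3[2:end]) / (maxiter - 1)   # ≈ 1.99, expected ~2
    rate_2 = sum(err4[1:end-1] ./ err4[2:end]) / (maxiter - 1)   # ≈ 3.84, expected ~4
    @show rate_1 rate_2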
WARNING: Method definition grad_test_X(Any, Any, Any, Any, Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_layers/test_conditional_layer_hint.jl:74 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_multiscale_conditional_hint_network.jl:63. Multiscale Conditional HINT invertibility test with squeeze_type=ShuffleLayer, split_scales=true, logdet=false Multiscale Conditional HINT gradient test with squeeze_type=ShuffleLayer, split_scales=true, logdet=false 0.37434214; 0.4175967 0.057311743; 0.07893902 0.009936303; 0.02074994 0.00020681322; 0.0056136316 0.0013027638; 0.0014006452 Multiscale Conditional HINT invertibility test with squeeze_type=ShuffleLayer, split_scales=true, logdet=true Multiscale Conditional HINT gradient test with squeeze_type=ShuffleLayer, split_scales=true, logdet=true 147.4895; 166.8316 36.021362; 45.692413 5.5045166; 10.340042 0.11706543; 2.300697 0.6921387; 0.51674265 Multiscale Conditional HINT invertibility test with squeeze_type=ShuffleLayer, split_scales=false, logdet=false Multiscale Conditional HINT gradient test with squeeze_type=ShuffleLayer, split_scales=false, logdet=false 312.97525; 328.89798 53.283386; 61.244747 9.885315; 13.8659935 0.8001785; 2.7905183 0.61953735; 0.37563252 Multiscale Conditional HINT invertibility test with squeeze_type=ShuffleLayer, split_scales=false, logdet=true Multiscale Conditional HINT gradient test with squeeze_type=ShuffleLayer, split_scales=false, logdet=true 250.63135; 261.56384 11.25415; 16.720394 1.4077148; 4.1408367 0.3173828; 1.6839439 0.123291016; 0.80657154 Multiscale Conditional HINT invertibility test with squeeze_type=WaveletLayer, split_scales=true, logdet=false Multiscale Conditional HINT gradient test with squeeze_type=WaveletLayer, split_scales=true, logdet=false 0.1897124; 0.18912023 0.043896034; 0.043599952 0.011594623; 0.011446582 0.0033985078; 0.0033244875 0.0008973181; 0.00086030795 Multiscale Conditional HINT invertibility test with squeeze_type=WaveletLayer, split_scales=true, logdet=true Multiscale Conditional HINT gradient test with squeeze_type=WaveletLayer, split_scales=true, logdet=true 108.70618; 120.89197 30.755127; 36.848022 5.7904053; 8.836851 0.97021484; 2.493438 0.020751953; 0.7408597 Multiscale Conditional HINT invertibility test with squeeze_type=WaveletLayer, split_scales=false, logdet=false Multiscale Conditional HINT gradient test with squeeze_type=WaveletLayer, split_scales=false, logdet=false 180.88351; 199.41824 38.49398; 47.761345 8.965988; 13.59967 2.5552902; 4.8721313 0.55371094; 1.7121315 Multiscale Conditional HINT invertibility test with squeeze_type=WaveletLayer, split_scales=false, logdet=true Multiscale Conditional HINT gradient test with squeeze_type=WaveletLayer, split_scales=false, logdet=true 71.27588; 56.51244 9.1484375; 16.530159 4.158203; 7.849064 0.6489258; 2.4943562 0.18579102; 0.7369242 Multiscale Conditional HINT invertibility test with squeeze_type=HaarLayer, split_scales=true, logdet=false Multiscale Conditional HINT gradient test with squeeze_type=HaarLayer, split_scales=true, logdet=false 0.39765114; 0.4374489 0.08068308; 0.10058197 0.016768843; 0.026718285 0.0025344193; 0.00750914 0.00028339028; 0.00220397 Multiscale Conditional HINT invertibility test with squeeze_type=HaarLayer, split_scales=true, logdet=true Multiscale Conditional HINT gradient test with squeeze_type=HaarLayer, split_scales=true, logdet=true 136.47766; 113.292145 41.400024; 29.807268 12.420288; 
6.623909 4.10376; 1.2055702 1.6922607; 0.24316609 Multiscale Conditional HINT invertibility test with squeeze_type=HaarLayer, split_scales=false, logdet=false Multiscale Conditional HINT gradient test with squeeze_type=HaarLayer, split_scales=false, logdet=false 390.83392; 427.20135 62.479057; 80.66277 8.682854; 17.774712 1.3056183; 3.240311 1.9436798; 0.3292848 Multiscale Conditional HINT invertibility test with squeeze_type=HaarLayer, split_scales=false, logdet=true Multiscale Conditional HINT gradient test with squeeze_type=HaarLayer, split_scales=false, logdet=true 51.945312; 64.22114 25.264404; 19.126492 10.261475; 7.192518 3.048584; 1.5141062 0.8564453; 0.0892064 Jacobian test 30.529066; 15.298814 12.53632; 4.1191573 5.745388; 1.3361905 2.777173; 0.44897848 1.3659217; 0.1444986 WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_multiscale_conditional_hint_network.jl:49 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_conditional_hint_network.jl:50. Invertibility test HINT network Gradient test cond. HINT net: input 78.7323; 13.066368 42.497437; 3.4018974 22.09436; 0.8553066 11.240234; 0.23459911 5.6817627; 0.05565405 2.855713; 0.0129954815 Invertibility test HINT network Gradient test cond. HINT net: input 69.20566; 9.53241 36.957214; 2.4118195 19.095123; 0.5893936 9.706146; 0.13611221 4.8923645; 0.028764725 2.4532166; 0.0073480606 Invertibility test HINT network Gradient test cond. HINT net: input 97.983154; 7.3814545 50.767944; 1.91436 25.897339; 0.44381332 13.074951; 0.09562492 6.5720215; 0.013266563 3.2885742; 0.004069805 Invertibility test HINT network Gradient test cond. HINT net: input 78.5141; 6.162014 40.68567; 1.6523876 20.805542; 0.3634863 10.519836; 0.06467724 5.283203; 0.009053707 2.6420898; 0.0040385723 Jacobian test 21.058523; 13.525656 11.417022; 4.6037498 5.919556; 1.5742716 3.02378; 0.52976996 1.5278748; 0.17143705 WARNING: Method definition loss(Any, Any, Any) in module Main at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_conditional_hint_network.jl:50 overwritten at /home/pkgeval/.julia/packages/InvertibleNetworks/NquDv/test/test_networks/test_conditional_glow_network.jl:99. 
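The "Jacobian test" lines appear to follow the same two-column pattern for a vector-valued map: the first number is the size of the finite difference F(X + h*dX) - F(X), the second is the residual after subtracting the Jacobian-vector product h*J*dX, which should decay faster. A self-contained sketch with an illustrative map and its exact directional derivative (not the package's API):

    # Sketch only: F and jvp are illustrative, not InvertibleNetworks functions.
    using LinearAlgebra, Random
    Random.seed!(0)

    F(X)       = tanh.(X) .+ 0.1f0 .* X.^2              # example nonlinear map
    jvp(X, dX) = (sech.(X).^2 .+ 0.2f0 .* X) .* dX      # its exact Jacobian-vector product

    X0  = randn(Float32, 8, 8)
    dX  = randn(Float32, 8, 8)
    JdX = jvp(X0, dX)

    for h in [0.1f0 / 2^j for j = 0:4]
        err1 = norm(F(X0 + h*dX) - F(X0))               # finite difference, ~O(h)
        err2 = norm(F(X0 + h*dX) - F(X0) - h*JdX)       # linearization residual, ~O(h^2)
        println(err1, "; ", err2)
    end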
Gradient test glow: input 118.4306; 14.168076 62.65277; 3.6465683 32.185364; 0.9643059 16.31903; 0.25580406
Gradient test glow: input 5.20166; 0.89034045 2.8319092; 0.21409112 1.4716187; 0.0513815 0.7475586; 0.013941482
Gradient test glow: input 137.91669; 17.088058 73.10339; 4.398979 37.586; 1.1651878 19.063232; 0.31236076
Gradient test glow: input 12.586548; 3.041328 7.1844482; 0.62948966 3.746704; 0.16026485 1.9017334; 0.051751077
Gradient test glow: input 4207.543; 491.34082 2225.3125; 124.129395 1143.5508; 31.170166 579.46875; 7.8917236
Gradient test glow: input 279.67578; 55.630196 153.96484; 13.688145 80.708984; 3.1175098 41.26172; 0.6515284
Gradient test glow: input 4116.8945; 489.3081 2180.3203; 122.781006 1120.791; 30.759644 568.06836; 7.70697
Gradient test glow: input 181.01562; 33.859337 99.203125; 8.234356 51.632812; 2.085928 26.316406; 0.5429639
Test Summary: | Pass Total Time
Networks | 282 282 12m36.3s
  Test test_networks/test_unrolled_loop.jl | 10 10 56.5s
  Test test_networks/test_generator.jl | 5 5 3.7s
  Test test_networks/test_glow.jl | 80 80 5m43.6s
  Test test_networks/test_dense_glow.jl | 14 14 22.7s
  Test test_networks/test_hyperbolic_network.jl | 17 17 4.7s
  Test test_networks/test_multiscale_hint_network.jl | 7 7 21.6s
  Test test_networks/test_multiscale_conditional_hint_network.jl | 87 87 19.3s
  Test test_networks/test_conditional_hint_network.jl | 31 31 10.0s
  Test test_networks/test_conditional_glow_network.jl | 31 31 4m34.2s
─────────────────────────────────────────────────────────────────────────────────────────────────────
                                          Time        Allocations
Total measured:                           1544s       96.5GiB

Section  ncalls  time  %tot  alloc  %tot
─────────────────────────────────────────────────────────────────────────────────────────────────────
test_utils/test_objectives.jl  1  20.3s  1.3%  206MiB  0.2%
test_utils/test_sequential.jl  1  231s  15.0%  5.66GiB  5.9%
test_utils/test_nnlib_convolution.jl  1  16.1s  1.0%  512MiB  0.5%
test_utils/test_activations.jl  1  42.5s  2.8%  184MiB  0.2%
test_utils/test_squeeze.jl  1  74.7s  4.9%  759MiB  0.8%
test_utils/test_jacobian.jl  1  1.68s  0.1%  63.6MiB  0.1%
test_utils/test_chainrules.jl  1  149s  9.7%  4.36GiB  4.5%
test_utils/test_flux.jl  1  39.7s  2.6%  392MiB  0.4%
test_layers/test_residual_block.jl  1  3.90s  0.3%  174MiB  0.2%
test_layers/test_flux_block.jl  1  7.12s  0.5%  140MiB  0.1%
test_layers/test_resnet.jl  1  34.3s  2.2%  298MiB  0.3%
test_layers/test_layer_conv1x1.jl  1  5.35s  0.3%  70.4MiB  0.1%
test_layers/test_coupling_layer_basic.jl  1  4.46s  0.3%  65.1MiB  0.1%
test_layers/test_coupling_layer_irim.jl  1  2.17s  0.1%  121MiB  0.1%
test_layers/test_coupling_layer_glow.jl  1  48.9s  3.2%  693MiB  0.7%
test_layers/test_coupling_layer_hint.jl  1  16.0s  1.0%  635MiB  0.6%
test_layers/test_conditional_layer_glow.jl  1  1.25s  0.1%  63.4MiB  0.1%
test_layers/test_conditional_layer_hint.jl  1  31.4s  2.0%  3.79GiB  3.9%
test_layers/test_conditional_res_block.jl  1  13.2s  0.9%  2.42GiB  2.5%
test_layers/test_hyperbolic_layer.jl  1  16.0s  1.0%  541MiB  0.5%
test_layers/test_actnorm.jl  1  8.29s  0.5%  51.8MiB  0.1%
test_layers/test_layer_affine.jl  1  12.0s  0.8%  110MiB  0.1%
test_networks/test_unrolled_loop.jl  1  56.5s  3.7%  652MiB  0.7%
test_networks/test_generator.jl  1  3.67s  0.2%  209MiB  0.2%
test_networks/test_glow.jl  1  344s  22.4%  45.5GiB  47.2%
test_networks/test_dense_glow.jl  1  22.7s  1.5%  276MiB  0.3%
test_networks/test_hyperbolic_network.jl  1  4.72s  0.3%  515MiB  0.5%
test_networks/test_multiscale_hint_network.jl  1  21.6s  1.4%  2.85GiB  3.0%
test_networks/test_multiscale_conditional_hint_network.jl  1  19.3s  1.3%  1.88GiB  2.0%
test_networks/test_conditional_hint_network.jl  1  10.0s  0.7%  1.01GiB  1.0%
test_networks/test_conditional_glow_network.jl  1  274s  17.9%  22.4GiB  23.2%
─────────────────────────────────────────────────────────────────────────────────────────────────────
Testing InvertibleNetworks tests passed
Testing completed after 1591.84s
PkgEval succeeded after 2428.42s