@@ -9,7 +9,7 @@ CUDA.allowscalar(false)
 
 # does not optimize over open vertices
 tn = TensorNetworkModel(instance; optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40))
-@info contraction_complexity(tn)
+@debug contraction_complexity(tn)
 @time marginals2 = marginals(tn; usecuda = true)
 @test all(x -> x isa CuArray, marginals2)
 # for dangling vertices, the output size is 1.
@@ -26,10 +26,10 @@
 
 # does not optimize over open vertices
 tn = TensorNetworkModel(instance; optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40))
-@info contraction_complexity(tn)
+@debug contraction_complexity(tn)
 most_probable_config(tn)
 @time logp, config = most_probable_config(tn; usecuda = true)
-@test log_probability(tn, config) ≈ logp.n
+@test log_probability(tn, config) ≈ logp
 culogp = maximum_logp(tn; usecuda = true)
 @test culogp isa CuArray
 @test Array(culogp)[] ≈ logp
@@ -54,10 +54,10 @@
 culogp = maximum_logp(tn2; usecuda = true)
 @test cup isa RescaledArray{T, N, <:CuArray} where {T, N}
 @test culogp isa CuArray
-@test Array(cup)[] ≈ exp(Array(culogp)[].n)
+@test Array(cup)[] ≈ exp(Array(culogp)[])
 
 # does not optimize over open vertices
 tn3 = MMAPModel(instance; marginalized = [2, 4, 6], optimizer)
 logp, config = most_probable_config(tn3; usecuda = true)
-@test log_probability(tn3, config) ≈ logp.n
+@test log_probability(tn3, config) ≈ logp
 end