diff --git a/perf/benchmark.jl b/perf/benchmark.jl
index 692bb9de..2e8de360 100644
--- a/perf/benchmark.jl
+++ b/perf/benchmark.jl
@@ -5,8 +5,10 @@ using BenchmarkTools
 ####################################################################
 
 function benchmark_driver!(f, x...; f_displayname=string(f))
+    x = (x..., nothing)
+
     println("benchmarking $(f_displayname)...")
-    tf = Libtask.TapedFunction(f, x)
+    tf = Libtask.TapedFunction(f, x...);
 
     print("  Run Original Function:")
     @btime $f($(x)...)
@@ -20,15 +22,32 @@ function benchmark_driver!(f, x...; f_displayname=string(f))
     print("  Run TapedFunction (compiled):")
     @btime $ctf($(x)...)
     GC.gc()
+
+    print("  Run TapedTask: ")
+    x = (x[1:end-1]..., produce);
+    # show the number of produce calls inside `f`
+    f_task = (tf, x; verbose=false) -> begin
+        tt = TapedTask(tf, x...);
+        c = 0
+        while consume(tt)!==nothing
+            c+=1
+        end
+        verbose && print("#produce=", c, "; ");
+    end
+    f_task(tf, x; verbose=true) # print #produce calls.
+    @btime $f_task($tf, $x)
+    GC.gc()
 end
 
 
 ####################################################################
 
-function rosenbrock(x)
+function rosenbrock(x, callback=nothing)
     i = x[2:end]
     j = x[1:end-1]
-    return sum((1 .- j).^2 + 100*(i - j.^2).^2)
+    ret = sum((1 .- j).^2 + 100*(i - j.^2).^2)
+    callback !== nothing && callback(ret)
+    return ret
 end
 
 x = rand(100000)
@@ -36,7 +55,7 @@ benchmark_driver!(rosenbrock, x)
 
 ####################################################################
 
-function ackley(x::AbstractVector)
+function ackley(x::AbstractVector, callback=nothing)
     a, b, c = 20.0, -0.2, 2.0*π
     len_recip = inv(length(x))
     sum_sqrs = zero(eltype(x))
@@ -44,6 +63,7 @@
     for i in x
         sum_cos += cos(c*i)
         sum_sqrs += i^2
+        callback !== nothing && callback(sum_sqrs)
     end
     return (-a * exp(b * sqrt(len_recip*sum_sqrs)) - exp(len_recip*sum_cos) + a + MathConstants.e)
 end
@@ -54,11 +74,13 @@ benchmark_driver!(ackley, x)
 ####################################################################
 
function generate_matrix_test(n) - return x -> begin + return (x, callback=nothing) -> begin # @assert length(x) == 2n^2 + n a = reshape(x[1:n^2], n, n) b = reshape(x[n^2 + 1:2n^2], n, n) - return log.((a * b) + a - b) + ret = log.((a * b) + a - b) + callback !== nothing && callback(ret) + return ret end end @@ -71,10 +93,12 @@ benchmark_driver!(matrix_test, x; f_displayname="matrix_test") relu(x) = log.(1.0 .+ exp.(x)) sigmoid(n) = 1. / (1. + exp(-n)) -function neural_net(w1, w2, w3, x1) +function neural_net(w1, w2, w3, x1, callback=nothing) x2 = relu(w1 * x1) x3 = relu(w2 * x2) - return sigmoid(LinearAlgebra.dot(w3, x3)) + ret = sigmoid(LinearAlgebra.dot(w3, x3)) + callback !== nothing && callback(ret) + return ret end xs = (randn(10,10), randn(10,10), randn(10), rand(10))