Benchmark produce #134

Merged: 7 commits, Mar 29, 2022
perf/benchmark.jl: 40 changes (32 additions & 8 deletions)
@@ -5,8 +5,10 @@ using BenchmarkTools
####################################################################

function benchmark_driver!(f, x...; f_displayname=string(f))
+    x = (x..., nothing)
+
    println("benchmarking $(f_displayname)...")
-    tf = Libtask.TapedFunction(f, x)
+    tf = Libtask.TapedFunction(f, x...);

    print("  Run Original Function:")
    @btime $f($(x)...)
@@ -20,30 +22,48 @@ function benchmark_driver!(f, x...; f_displayname=string(f))
    print("  Run TapedFunction (compiled):")
    @btime $ctf($(x)...)
    GC.gc()
+
+    print("  Run TapedTask: ")
+    x = (x[1:end-1]..., produce);
+    # show the number of produce calls inside `f`
+    f_task = (tf, x; verbose=false) -> begin
+        tt = TapedTask(tf, x...);
+        c = 0
+        while consume(tt)!==nothing
+            c+=1
+        end
+        verbose && print("#produce=", c, "; ");
+    end
+    f_task(tf, x; verbose=true) # print #produce calls.
+    @btime $f_task($tf, $x)
+    GC.gc()
end

####################################################################


-function rosenbrock(x)
+function rosenbrock(x, callback=nothing)
    i = x[2:end]
    j = x[1:end-1]
-    return sum((1 .- j).^2 + 100*(i - j.^2).^2)
+    ret = sum((1 .- j).^2 + 100*(i - j.^2).^2)
+    callback !== nothing && callback(ret)
+    return ret
end

x = rand(100000)
benchmark_driver!(rosenbrock, x)

####################################################################

-function ackley(x::AbstractVector)
+function ackley(x::AbstractVector, callback=nothing)
    a, b, c = 20.0, -0.2, 2.0*π
    len_recip = inv(length(x))
    sum_sqrs = zero(eltype(x))
    sum_cos = sum_sqrs
    for i in x
        sum_cos += cos(c*i)
        sum_sqrs += i^2
+        callback !== nothing && callback(sum_sqrs)
    end
    return (-a * exp(b * sqrt(len_recip*sum_sqrs)) -
            exp(len_recip*sum_cos) + a + MathConstants.e)
@@ -54,11 +74,13 @@ benchmark_driver!(ackley, x)

####################################################################
function generate_matrix_test(n)
-    return x -> begin
+    return (x, callback=nothing) -> begin
        # @assert length(x) == 2n^2 + n
        a = reshape(x[1:n^2], n, n)
        b = reshape(x[n^2 + 1:2n^2], n, n)
-        return log.((a * b) + a - b)
+        ret = log.((a * b) + a - b)
+        callback !== nothing && callback(ret)
+        return ret
    end
end

@@ -71,10 +93,12 @@ benchmark_driver!(matrix_test, x; f_displayname="matrix_test")
relu(x) = log.(1.0 .+ exp.(x))
sigmoid(n) = 1. / (1. + exp(-n))

-function neural_net(w1, w2, w3, x1)
+function neural_net(w1, w2, w3, x1, callback=nothing)
    x2 = relu(w1 * x1)
    x3 = relu(w2 * x2)
-    return sigmoid(LinearAlgebra.dot(w3, x3))
+    ret = sigmoid(LinearAlgebra.dot(w3, x3))
+    callback !== nothing && callback(ret)
+    return ret
end

xs = (randn(10,10), randn(10,10), randn(10), rand(10))
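For context on what the new `f_task` closure measures, here is a minimal, self-contained sketch (not part of this diff) of the produce/consume pattern it times. It assumes Libtask's exported `TapedTask`, `produce`, and `consume`, exactly as they are used in `benchmark_driver!` above; the toy function `count_to` is hypothetical.

using Libtask

# Hypothetical toy task body: yields each partial sum via `produce`,
# playing the role the benchmark fills by passing `produce` as the callback.
function count_to(n)
    s = 0
    for i in 1:n
        s += i
        produce(s)   # hand the partial sum to the consumer
    end
    return nothing
end

tt = TapedTask(count_to, 5)

# Count the produce calls, mirroring the `while consume(tt) !== nothing`
# loop inside `f_task`.
c = 0
while consume(tt) !== nothing
    global c += 1
end
println("#produce = ", c)   # should print 5 under these assumptions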