#!/usr/bin/env python
# coding: utf-8

# In[3]:

Base.banner()  # generic binary

# In[4]:

Sys.cpu_summary()

# In[5]:

using Benchmarks: @benchmark  # Pkg.clone("https://github.com/johnmyleswhite/Benchmarks.jl.git")

Benchmarks.Environment()

# ### Memoized Julia

# In[2]:

using Memoize: @memoize  # Pkg.add("Memoize")

# Naive doubly recursive Fibonacci, memoized by the macro.
@memoize jl_fib(n) = n < 2 ? n : jl_fib(n-1) + jl_fib(n-2)

# Type inference for memoized functions is currently not implemented, which is
# why the call-site type annotation is needed for this to be fast:
# * https://github.com/simonster/Memoize.jl#implementation-notes

@benchmark jl_fib(20)::Int

# In[7]:

@benchmark jl_fib(20.0)::Float64

# ### Memoized Python

# In[6]:

import sys
sys.version

# In[1]:

from functools import lru_cache as cache

# Same naive recursion, memoized with an unbounded LRU cache.
@cache(maxsize=None)
def py_fib(n):
    if n < 2:
        return n
    return py_fib(n-1) + py_fib(n-2)

get_ipython().run_line_magic('timeit', 'py_fib(20)')

# In[5]:

get_ipython().run_line_magic('timeit', 'py_fib(20.0)')
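# A side note on the two Python timings above: `lru_cache` keys on argument
# equality by default (`typed=False`), so `20` and `20.0` share one cache
# entry and the second `%timeit` is measuring pure cache hits. The sketch
# below (the name `py_fib_typed` is made up for illustration, not part of the
# original benchmark) shows how `typed=True` caches int and float arguments
# separately.

# In[ ]:

from functools import lru_cache

@lru_cache(maxsize=None, typed=True)
def py_fib_typed(n):
    if n < 2:
        return n
    return py_fib_typed(n-1) + py_fib_typed(n-2)

py_fib_typed(20)           # fills the int-keyed entries
py_fib_typed(20.0)         # recomputed: float arguments get their own entries
py_fib_typed.cache_info()  # hits/misses reflect the two separate call chains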
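# Likewise, once the cache is warm, every iteration of the `%timeit` loops
# above (and of `@benchmark` on the Julia side) is a memoized lookup rather
# than the full recursion. If the cold path is of interest, `cache_clear()`
# from `functools` resets the cache; the cell below is an illustrative sketch
# rather than part of the original run.

# In[ ]:

py_fib.cache_info()   # hits/misses accumulated by the runs above
get_ipython().run_line_magic('timeit', 'py_fib.cache_clear(); py_fib(20)')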