Machine Learning in Elixir - Chapter 2
Mix.install([
  {:nx, "~> 0.5"},
  {:exla, "~> 0.5"},
  {:benchee, github: "bencheeorg/benchee", override: true}
])
Section
Nx.tensor([1, 2, 3])
a = Nx.tensor([[1, 2, 3], [4, 5, 6]])
b = Nx.tensor(1.0)
c = Nx.tensor([[[[[[1.0, 2]]]]]])
dbg(a)
dbg(b)
dbg(c)
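To make those differences explicit, the shape, type, and rank of each tensor can be inspected directly; a quick sketch using Nx.shape/1, Nx.type/1, and Nx.rank/1 on the tensors bound above.

{Nx.shape(a), Nx.type(a), Nx.rank(a)}
{Nx.shape(b), Nx.type(b), Nx.rank(b)}
{Nx.shape(c), Nx.type(c), Nx.rank(c)}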
a = Nx.tensor([1, 2, 3])
b = Nx.tensor([1.0, 2.0, 3.0])
dbg(a)
dbg(b)
# This value is too small for the default 32-bit float, so it silently underflows
Nx.tensor(0.0000000000000000000000000000000000000000000001)
# A 64-bit float has enough range to represent a value this small
Nx.tensor(1.0e-45, type: {:f, 64})
# 128 is outside the signed 8-bit range of -128..127, so the value overflows
Nx.tensor(128, type: {:s, 8})
a = Nx.tensor([[1, 2, 3], [4, 5, 6]])
dbg(a)
Nx.to_binary(a)
# Rebuild a tensor from its raw binary representation
<<1::64-signed-native, 2::64-signed-native, 3::64-signed-native>>
|> Nx.from_binary({:s, 64})

# The same binary can be reshaped into a 1x3 matrix; reshape keeps the same
# underlying data and only changes the shape
<<1::64-signed-native, 2::64-signed-native, 3::64-signed-native>>
|> Nx.from_binary({:s, 64})
|> Nx.reshape({1, 3})
a = Nx.tensor([1, 2, 3])

# Nx.as_type/2 converts the underlying values to the new type
a
|> Nx.as_type(:f32)
|> Nx.reshape({1, 3, 1})

# Nx.bitcast/2 only reinterprets the raw bits, so the type widths must match;
# the signed 64-bit integers come back as (meaningless) 64-bit floats
Nx.bitcast(a, {:f, 64})
a = Nx.tensor([[[-1, -2, -3], [-4, -5, -6]], [[1, 2, 3], [4, 5, 6]]])
Nx.abs(a)
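Nx.abs/1 is one of many element-wise unary functions that preserve the input shape; a small sketch with a couple of others (the inputs here are arbitrary, not from the text):

Nx.negate(a)
Nx.exp(Nx.tensor([0.0, 1.0, 2.0]))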
a = Nx.tensor([[1, 2, 3], [4, 5, 6]])
b = Nx.tensor([[6, 7, 8], [9, 10, 11]])
Nx.add(a, b)
|> dbg()
Nx.multiply(a, b)
Nx.multiply(Nx.tensor(3), Nx.tensor([[1, 2, 3], [4, 5, 6]]))
Nx.multiply(Nx.tensor([1, 2, 3]), Nx.tensor([[1, 2, 3], [4, 5, 6]]))
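Broadcasting can also be spelled out explicitly; a minimal sketch, reusing the same operands, that expands the vector with Nx.broadcast/2 before multiplying:

Nx.tensor([1, 2, 3])
|> Nx.broadcast({2, 3})
|> Nx.multiply(Nx.tensor([[1, 2, 3], [4, 5, 6]]))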
revs = Nx.tensor([85, 76, 42, 34, 46, 23, 52, 99, 22, 32, 85, 51])
Nx.sum(revs)
revs =
  Nx.tensor(
    [
      [21, 64, 86, 26, 74, 81, 38, 79, 70, 48, 85, 33],
      [64, 82, 48, 39, 70, 71, 81, 53, 50, 67, 36, 50],
      [68, 74, 39, 78, 95, 62, 53, 21, 43, 59, 51, 88],
      [47, 74, 97, 51, 98, 47, 61, 36, 83, 55, 74, 43]
    ],
    names: [:year, :month]
  )
Nx.sum(revs, axes: [:year])
|> dbg()
Nx.sum(revs, axes: [:month])
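Other aggregations accept the same axes option; a short sketch, assuming Nx.mean/2 and the keep_axes: true option behave here the way they do for Nx.sum/2:

Nx.mean(revs, axes: [:month], keep_axes: true)
Nx.sum(revs, axes: [:year], keep_axes: true)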
defmodule MyModule do
  import Nx.Defn

  defn adds_one(x) do
    Nx.add(x, 1) |> print_expr()
  end
end
MyModule.adds_one(Nx.tensor([1, 2, 3]))
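The same defn can also be JIT-compiled on demand; a minimal sketch with Nx.Defn.jit/2, passing EXLA as the compiler (the printed expression still appears, since adds_one calls print_expr):

compiled_adds_one = Nx.Defn.jit(&MyModule.adds_one/1, compiler: EXLA)
compiled_adds_one.(Nx.tensor([1, 2, 3]))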
defmodule Softmax do
  import Nx.Defn

  defn softmax(n), do: Nx.exp(n) / Nx.sum(Nx.exp(n))
end
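Before benchmarking, it is worth confirming the defn works on a small input; the returned probabilities should sum to 1:

Softmax.softmax(Nx.tensor([1.0, 2.0, 3.0]))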
key = Nx.Random.key(42)
{tensor, _key} = Nx.Random.uniform(key, shape: {1_000_000})
Benchee.run(%{
  "JIT with EXLA" => fn ->
    apply(EXLA.jit(&Softmax.softmax/1), [tensor])
  end,
  "Regular Elixir" => fn ->
    Softmax.softmax(tensor)
  end
})
# With EXLA registered as the default defn compiler, the "Regular Elixir"
# benchmark below is compiled too, so both variants should now run fast
Nx.Defn.global_default_options(compiler: EXLA)
key = Nx.Random.key(42)
{tensor, _key} = Nx.Random.uniform(key, shape: {1_000_000})
Benchee.run(%{
  "JIT with EXLA" => fn ->
    apply(EXLA.jit(&Softmax.softmax/1), [tensor])
  end,
  "Regular Elixir" => fn ->
    Softmax.softmax(tensor)
  end
})
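An alternative to setting the defn compiler is to change the default backend, so that every Nx operation, not just defn code, runs through EXLA; a sketch, assuming EXLA.Backend from the EXLA dependency installed above:

Nx.default_backend(EXLA.Backend)
Nx.tensor([1, 2, 3])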