# MLIE Chapter 2
# Install dependencies for this notebook. EXLA pulls in Nx transitively;
# Benchee is used for the benchmarks at the end of the chapter.
Mix.install(
  [
    # {:nx, "~> 0.5"},
    {:exla, "== 0.5.0"},
    {:benchee, "~> 1.2"}
  ]

  # To run on an NVIDIA GPU instead of the CPU, close the dependency list
  # above with a comma and uncomment the options below:
  #
  # config: [
  #   nx: [
  #     default_backend: EXLA.Backend,
  #     # default_defn_options: [compiler: EXLA]
  #   ],
  #   exla: [
  #     default_client: :cuda,
  #     clients: [
  #       host: [platform: :host],
  #       cuda: [platform: :cuda]
  #     ]
  #   ]
  # ],
  # system_env: [
  #   XLA_TARGET: "cuda120"
  # ]
)
# Section
# 39 : Understanding Nx Tensors
Nx.tensor([1, 2, 3])
# 39
# a: rank-2 tensor (2x3), b: scalar (rank 0), c: rank-6 tensor
# (six levels of nesting; the 1.0 makes it a float tensor).
a = Nx.tensor([[1, 2, 3], [4, 5, 6]])
b = Nx.tensor(1.0)
c = Nx.tensor([[[[[[1.0, 2]]]]]])
# dbg/1 prints each tensor together with the expression that produced it.
dbg(a)
dbg(b)
dbg(c)
# 40 : Tensors Have a Type
# Integer input infers a signed 64-bit type; float input infers f32.
a = Nx.tensor([1, 2, 3])
b = Nx.tensor([1.0, 2.0, 3.0])
dbg(a)
dbg(b)
# 42
# A value this tiny is below what the default f32 type can represent,
# so it underflows to 0.0.
Nx.tensor(0.0000000000000000000000000000000000000000000001)
# 42
# Forcing 64-bit floats preserves a tiny value that f32 cannot hold.
Nx.tensor(1.0e-45, type: {:f, 64})
# 42
# 128 does not fit in a signed 8-bit integer (max 127) — demonstrates
# what happens on integer overflow.
Nx.tensor(128, type: {:s, 8})
# 43
# Mixing integers and floats promotes the whole tensor to a float type.
Nx.tensor([1.0, 2, 3])
# 43 : Tensors have shape
# Shapes: a is {2}, b is {2, 2}, c is {2, 2, 2}.
a = Nx.tensor([1, 2])
b = Nx.tensor([[1, 2], [3, 4]])
c = Nx.tensor([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
dbg(a)
dbg(b)
dbg(c)
# 45
# A scalar has the empty shape {}.
Nx.tensor(10)
# 45
# Axes can be named to document the shape (:x = rows, :y = columns here).
Nx.tensor([[1, 2, 3], [4, 5, 6]], names: [:x, :y])
# 46 : Tensors Have Data
# A tensor's data is a flat binary, regardless of its shape.
tensor = Nx.tensor([[1, 2, 3], [4, 5, 6]])
tensor |> Nx.to_binary()
# 46
# The reverse direction: interpret raw bytes as a signed 64-bit tensor.
raw = <<1::64-signed-native, 2::64-signed-native, 3::64-signed-native>>
Nx.from_binary(raw, {:s, 64})
# 47
# from_binary/2 always yields a flat tensor; reshape gives it structure.
<<1::64-signed-native, 2::64-signed-native, 3::64-signed-native>>
|> Nx.from_binary({:s, 64})
|> Nx.reshape({1, 3})
# 48 : Shape and Type Operations
base = Nx.tensor([1, 2, 3])
# 48
# Convert the values to f32, then add singleton dimensions.
Nx.reshape(Nx.as_type(base, {:f, 32}), {1, 3, 1})
# 48
# bitcast reinterprets the underlying bytes under a new type
# (no value conversion, unlike as_type).
base |> Nx.bitcast({:f, 64})
# 49 : Element-wise Unary Operations
# Plain Elixir: apply abs to every element of a list.
numbers = [-1, -2, -3, 0, 1, 2, 3]
for n <- numbers, do: abs(n)
# 49
# Nx: the same operation applies element-wise across the whole tensor.
values = Nx.tensor([[[-1, -2, -3], [-4, -5, -6]], [[1, 2, 3], [4, 5, 6]]])
values |> Nx.abs()
# 50 : Element-wise Binary Operations
# Plain Elixir: pairwise addition of two lists.
left = [1, 2, 3]
right = [4, 5, 6]
Enum.zip_with(left, right, &+/2)
# 51
# Nx: element-wise addition of two same-shaped tensors.
x = Nx.tensor([[1, 2, 3], [4, 5, 6]])
y = Nx.tensor([[6, 7, 8], [9, 10, 11]])
x |> Nx.add(y)
# 50
x |> Nx.multiply(y)
# 51
# A scalar is broadcast across every element.
Nx.add(5, Nx.tensor([1, 2, 3]))
# 52
# A lower-rank tensor is broadcast across the higher-rank one.
Nx.add(Nx.tensor([1, 2, 3]), Nx.tensor([[4, 5, 6], [7, 8, 9]]))
# 52 : Reductions
revenues = Nx.tensor([85, 76, 42, 34, 46, 23, 52, 99, 22, 32, 85, 51])
# 52
# With no axis given, sum reduces over every element to a scalar.
revenues |> Nx.sum()
# 52
# 4 years x 12 months of revenue, with named axes.
revenues =
  Nx.tensor(
    [
      [21, 64, 86, 26, 74, 81, 38, 79, 70, 48, 85, 33],
      [64, 82, 48, 39, 70, 71, 81, 53, 50, 67, 36, 50],
      [68, 74, 39, 78, 95, 62, 53, 21, 43, 59, 51, 88],
      [47, 74, 97, 51, 98, 47, 61, 36, 83, 55, 74, 43]
    ],
    names: [:year, :month]
  )

# 53
# Reduce over :year — one total per month.
revenues |> Nx.sum(axes: [:year])
# 53
# Reduce over :month — one total per year.
revenues |> Nx.sum(axes: [:month])
# 56 : Going from def to defn
defmodule MyModule do
  import Nx.Defn

  # Numerical definition: adds 1 element-wise. print_expr/1 dumps the
  # staged defn expression when the function is invoked.
  defn adds_one(x) do
    x
    |> Nx.add(1)
    |> print_expr()
  end
end
# 57
MyModule.adds_one(Nx.tensor([1, 2, 3]))
# 58
defmodule Softmax do
  import Nx.Defn

  # softmax(n) = exp(n) / sum(exp(n)); inside defn, `/` is the Nx
  # element-wise division operator.
  defn softmax(n) do
    Nx.exp(n) / Nx.sum(Nx.exp(n))
  end
end
# 58
# Benchmark the plain defn invocation against an EXLA-jitted version on a
# one-million-element tensor of uniform random values.
key = Nx.Random.key(42)
{tensor, _key} = Nx.Random.uniform(key, shape: {1_000_000})

Benchee.run(
  %{
    "JIT with EXLA" => fn ->
      # jit is invoked inside the measured fun, mirroring the book's setup.
      EXLA.jit(&Softmax.softmax/1).(tensor)
    end,
    "Regular Elixir" => fn ->
      Softmax.softmax(tensor)
    end
  },
  time: 10
)
# 59
# From here on, make EXLA the default defn compiler and the default tensor
# backend, so subsequent cells run through XLA instead of pure Elixir.
Nx.Defn.global_default_options(compiler: EXLA)
Nx.default_backend(EXLA.Backend)
# 60
# Re-run the same benchmark now that EXLA is the default compiler/backend;
# the "Regular Elixir" case should close the gap.
rng_key = Nx.Random.key(42)
{input, _rng_key} = Nx.Random.uniform(rng_key, shape: {1_000_000})

Benchee.run(
  %{
    "JIT with EXLA" => fn ->
      EXLA.jit(&Softmax.softmax/1).(input)
    end,
    "Regular Elixir" => fn ->
      Softmax.softmax(input)
    end
  },
  time: 10
)