# Ch2: Thinking in Tensors
# Install notebook dependencies:
#   :nx      — tensor library (the core of this chapter)
#   :exla    — XLA-based compiler/backend used to JIT-compile defn functions
#   :benchee — benchmarking tool, pulled from GitHub (override: true forces
#              this version over any transitive :benchee requirement)
Mix.install([
{:nx, "~> 0.5"},
{:exla, "~> 0.5"},
{:benchee, github: "bencheeorg/benchee", override: true}
])
# Get comfortable with Nx
# Basic tensor
Nx.tensor([1,2,3])
# Nesting depth determines rank: a is rank-2 (2x3), b is a rank-0 scalar,
# and c is rank-6 thanks to six levels of list nesting.
a = Nx.tensor([[1, 2, 3], [4, 5, 6]])
b = Nx.tensor(1.0)
c = Nx.tensor([[[[[[1.0, 2]]]]]])
# dbg/1 prints each tensor along with its source expression, type, and shape.
dbg(a)
dbg(b)
dbg(c)
# Tensors have a type
# Integer data infers a signed 64-bit type; float data infers 32-bit floats.
a = Nx.tensor([1, 2, 3])
b = Nx.tensor([1.0, 2.0, 3.0])
# Forcing a type that cannot represent the value wraps around (two's complement):
c = Nx.tensor(128, type: {:s, 8}) # 127 is max positive int for 8 bits so this will flip to -128
scalar = Nx.tensor(1.0)
dbg(a)
dbg(b)
dbg(c)
dbg(scalar)
# Tensors have a shape
# Shapes below: a is {2}, b is {2, 2}, c is {2, 2, 2}, and scalar is {} (rank 0).
a = Nx.tensor([1, 2])
b = Nx.tensor([[1, 2], [3, 4]])
c = Nx.tensor([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
scalar = Nx.tensor(1.0)
dbg(a)
dbg(b)
dbg(c)
dbg(scalar)
# Named dimensions
# Axes can carry names so later operations can refer to :x / :y instead of
# positional axis indices.
named = Nx.tensor([[1, 2, 3], [4, 5, 6]], names: [:x, :y])
# Tensor data is stored as Elixir binary (byte array)
Nx.to_binary(named)
# Because Nx has to turn your data into a binary representation when you use Nx.tensor/2,
# it’s more performant to instead create tensors using Nx.from_binary/2
# Each segment below is a 64-bit signed integer in native byte order, matching
# the {:s, 64} type passed to Nx.from_binary/2.
t = <<1::64-signed-native, 2::64-signed-native, 3::64-signed-native>>
|> Nx.from_binary({:s, 64})
|> dbg
# from_binary always produces a rank-1 tensor; reshape to get other shapes.
t
|> Nx.reshape({1, 3})
# Using Nx Operations
# Shape and type operations.
# Start from the default integer tensor (s64, shape {3}), cast its element
# type to 32-bit float, then reshape it into a rank-3 f32[1][3][1] tensor.
a = Nx.tensor([1, 2, 3])
as_floats = Nx.as_type(a, {:f, 32})
Nx.reshape(as_floats, {1, 3, 1})
# Element-wise unary operations.
# In plain Elixir, applying abs to every element is a straightforward map:
values = [-1, -2, -3, 0, 1, 2, 3]
for v <- values, do: abs(v)

# With nested lists this approach gets unwieldy fast. Nx applies the
# operation to every element regardless of how deeply the data is nested.
a = Nx.tensor([[[-1, -2, -3], [-4, -5, -6]], [[1, 2, 3], [4, 5, 6]]])
Nx.abs(a)
# Element-wise binary operations.
# Plain Elixir: pairwise sum of two lists -> [5, 7, 9]
xs = [1, 2, 3]
ys = [4, 5, 6]
Enum.zip_with(xs, ys, fn x, y -> x + y end)

# The Nx equivalents operate element-wise over entire tensors at once.
a = Nx.tensor([[1, 2, 3], [4, 5, 6]])
b = Nx.tensor([[6, 7, 8], [9, 10, 11]])
added = Nx.add(a, b)
multi = Nx.multiply(a, b)

# Broadcasting is supported: scalars and lower-rank tensors are expanded
# to match the other operand's shape.
# 5 + [1, 2, 3] -> [6, 7, 8]
Nx.add(5, Nx.tensor([1, 2, 3]))
# [1, 2, 3] broadcast against a 2x3 tensor -> [[5, 7, 9], [8, 10, 12]]
Nx.add(Nx.tensor([1, 2, 3]), Nx.tensor([[4, 5, 6], [7, 8, 9]]))
# Reductions collapse one or more axes by folding an operation across them.
# Summing a rank-1 tensor yields a scalar: 647.
r = Nx.tensor([85, 76, 42, 34, 46, 23, 52, 99, 22, 32, 85, 51])
Nx.sum(r)

# A 4x12 revenue matrix with named axes: 4 years of 12 monthly figures.
revs =
  Nx.tensor(
    [
      [21, 64, 86, 26, 74, 81, 38, 79, 70, 48, 85, 33],
      [64, 82, 48, 39, 70, 71, 81, 53, 50, 67, 36, 50],
      [68, 74, 39, 78, 95, 62, 53, 21, 43, 59, 51, 88],
      [47, 74, 97, 51, 98, 47, 61, 36, 83, 55, 74, 43]
    ],
    names: [:year, :month]
  )

# Reduce across :year (per-month totals over all years):
# [200, 294, 270, 194, 337, 261, 233, 189, 246, 229, 246, 214]
Nx.sum(revs, axes: [:year])
# Reduce across :month (per-year totals): [705, 711, 731, 766]
Nx.sum(revs, axes: [:month])
# No axes: reduce everything to a scalar -> 2913
Nx.sum(revs)
# From def to defn
defmodule MyModule do
  @moduledoc """
  Demonstrates numerical definitions (`defn`): instead of executing eagerly,
  the body is staged into an Nx expression graph that a compiler can optimize.
  """
  import Nx.Defn

  # Adds 1 to every element; print_expr/1 dumps the staged expression graph
  # so you can see what the compiler receives.
  defn adds_one(x) do
    print_expr(Nx.add(x, 1))
  end
end

MyModule.adds_one(Nx.tensor([1,2,3]))
defmodule Softmax do
  @moduledoc """
  Softmax as a `defn`, so it can be run by the default evaluator or
  JIT-compiled with a backend such as EXLA.
  """
  import Nx.Defn

  # exp(n) normalized by the sum of all exponentials.
  defn softmax(n) do
    Nx.exp(n) / Nx.sum(Nx.exp(n))
  end
end
# Setting the default compiler tells Nx to always JIT-compile
# all defn invocations with the given compiler, e.g.
#   Nx.Defn.global_default_options(compiler: EXLA)
# Here we instead JIT one function explicitly and benchmark it against the
# default defn evaluator on a one-million-element random tensor.
key = Nx.Random.key(42)
{tensor, _key} = Nx.Random.uniform(key, shape: {1_000_000})

Benchee.run(
  %{
    "JIT with EXLA" => fn -> EXLA.jit(&Softmax.softmax/1).(tensor) end,
    "Regular Elixir" => fn -> Softmax.softmax(tensor) end
  },
  time: 10
)