ch02. Get Comfortable with Nx
Section
# Install notebook dependencies: Nx (tensors), EXLA (XLA-backed JIT
# compiler/backend), and Benchee (benchmarking) straight from GitHub.
Mix.install([
{:nx, "~>0.5"},
{:exla, "~>0.5"},
# override: true lets this GitHub checkout win over any transitive benchee dep
{:benchee, github: "bencheeorg/benchee", override: true}
])
# Tensor creation basics: lists become tensors, nesting depth sets the rank.
Nx.tensor([1, 2, 3])
# 2x3 matrix (rank 2)
a = Nx.tensor([[1, 2, 3], [4, 5, 6]])
# bare number -> scalar tensor (rank 0)
b = Nx.tensor(1.0)
# six levels of nesting -> rank-6 tensor of shape {1, 1, 1, 1, 1, 2}
c = Nx.tensor([[[[[[1.0, 2]]]]]])
dbg(a)
dbg(b)
dbg(c)
# Type inference: integer input yields a signed-integer tensor,
# float input yields a float tensor — compare the dbg output below.
a = Nx.tensor([1, 2, 3])
b = Nx.tensor([1.0, 2.0, 3.0])
dbg(a)
dbg(b)
# Type limits: floats default to f32, so extremely small values
# underflow unless a wider type is requested explicitly.
Nx.tensor(0.0000000000000000000000000000000000000000000001)
# 1.0e-45 sits near the smallest f32 subnormal — presumably shown to
# illustrate where f32 precision runs out (verify against the dbg output)
Nx.tensor(1.0e-45)
# requesting f64 keeps a value that f32 cannot represent
Nx.tensor(1.0e-46, type: {:f, 64})
# 128 is outside the s8 range (max 127) — demonstrates what happens
# when a value does not fit the requested integer type
Nx.tensor(128, type: {:s, 8})
# mixed integer/float input is promoted to a float tensor
Nx.tensor([1.0, 2, 3])
# Rank examples: vector (1-D), matrix (2-D), and a rank-3 tensor.
a = Nx.tensor([1, 2])
b = Nx.tensor([[1, 2], [3, 4]])
c = Nx.tensor([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
dbg(a)
dbg(b)
dbg(c)
# scalar tensor
Nx.tensor(10)
# named axes make later reductions and transposes self-documenting
Nx.tensor([[1,2,3], [4,5,6]], names: [:x, :y])
# Tensors are backed by flat binaries; to_binary exposes the raw bytes.
a = Nx.tensor([[1,2,3], [4,5,6]])
Nx.to_binary(a)
# Round-trip: build a tensor directly from a native-endian s64 binary...
<<1::64-signed-native, 2::64-signed-native, 3::64-signed-native>>
|> Nx.from_binary({:s, 64})
# ...and reshape the same flat data into a 1x3 matrix.
<<1::64-signed-native, 2::64-signed-native, 3::64-signed-native>>
|> Nx.from_binary({:s, 64})
|> Nx.reshape({1,3})
2.2 Using Nx Operations
- shape and type operations
- element-wise unary operations
- element-wise binary operations
- reductions
1. Shape and Type Operations
# Shape/type operations return NEW tensors; `a` itself never changes.
a = Nx.tensor([1,2,3])
# cast to f32, then reshape the 3-element vector to shape {1, 3, 1}
a
|> Nx.as_type({:f, 32})
|> Nx.reshape({1,3,1})
# unchanged — proves the pipeline above did not mutate it
a
# bitcast reinterprets the raw bits as another 64-bit type with no
# numeric conversion — contrast with as_type above
dbg(Nx.bitcast(a, {:f, 64}))
dbg(a)
a
2. Element-wise Unary Operations
# Plain-Elixir baseline: apply abs/1 to every element of a list.
values = [-1, -2, -3, 0, 1, 2, 3]
for n <- values, do: abs(n)
# Nx version: Nx.abs/1 applies element-wise across the entire rank-3
# tensor in a single call — no explicit iteration needed.
a = Nx.tensor(
[
[
[-1,-2,-3],
[-4,-5,-6]
],
[
[1,2,3],
[4,5,6]
]
]
)
Nx.abs(a)
3. Element-wise Binary Operations
# Plain-Elixir baseline: pairwise addition of two equal-length lists.
xs = [1, 2, 3]
ys = [4, 5, 6]
for {x, y} <- Enum.zip(xs, ys), do: x + y
# Nx version: add/multiply operate element-wise on whole tensors.
a = Nx.tensor(
[
[1,2,3],
[4,5,6]
]
)
b = Nx.tensor(
[
[6,7,8],
[9,10,11]
]
)
Nx.add(a, b)
# element-wise (Hadamard) product — NOT matrix multiplication
Nx.multiply(a, b)
# iota fills a tensor of the given shape with 0, 1, 2, ...
(Nx.iota({1,2}))
# NOTE(review): this passes a plain shape tuple, relying on
# Nx.shape/1 validating and returning tuples — confirm against Nx docs
Nx.shape({1,3,3,2})
Reading shape {1, 3, 3, 2} from the outside in: 1 block containing 3 blocks, each holding 3 rows of 2 elements.
Nx.shape({1,3,3,2})
# sequential values 0, 1, 2, ... laid out in shape {1, 3, 3, 2}
Nx.iota({1,3,3,2})
Nx.iota({2,3,3} ) #, names: [:x, :y])
# same total layout idea with a different leading axis — compare with the
# {1, 3, 3, 2} tensor above when studying broadcasting below
Nx.iota({4,1,3,2})
Shape {4, 1, 3, 2}: four outer blocks, each containing a single 3 × 2 matrix.
Nx.iota({4,1,3,2})
# Broadcasting: {1,3,3,2} and {4,1,3,2} are compatible because every axis
# pair is either equal or has a 1; the result takes the larger size per axis.
a = Nx.iota({1,3,3,2})
b = Nx.iota({4,1,3,2})
Nx.add(a, b)
# repeated to inspect the output again
a = Nx.iota({1,3,3,2})
b = Nx.iota({4,1,3,2})
Nx.add(a, b)
# Named axes: reduce over :y, leaving one sum per :x row.
a = Nx.tensor([[1,2,3],[4,5,6]], names: [:x, :y])
Nx.sum(a, axes: [:y])
# Scalars broadcast against any shape...
Nx.add(5, Nx.tensor([1,2,3]))
# ...and a 3-element vector broadcasts across each row of a 2x3 matrix.
Nx.add(Nx.tensor([1,2,3]), Nx.tensor([[4,5,6], [7,8,9]]))
4. Reductions
# Reductions collapse one or more axes down to a single value per slice.
revs = Nx.tensor([85,76,42,34,46,23,52,99,22,32,85,51])
# grand total over the whole vector
Nx.sum(revs)
# 4 years x 12 months of revenue, with named axes for readable reductions
revs = Nx.tensor(
[
[21, 64, 86, 26, 74, 81, 38, 79, 70, 48, 85, 33],
[64, 82, 48, 39, 70, 71, 81, 53, 50, 67, 36, 50],
[68, 74, 39, 78, 95, 62, 53, 21, 43, 59, 51, 88],
[47, 74, 97, 51, 98, 47, 61, 36, 83, 55, 74, 43]
], names: [:year, :month])
# per-month totals: sums across the :year axis, leaving 12 values
Nx.sum(revs, axes: [:year])
# per-year totals: sums across the :month axis, leaving 4 values
Nx.sum(revs, axes: [:month])
2.3 Representing the World
- Tabular Data
- Images
- Video
- Audio
- Text
2.4 Going from def to defn
defmodule MyModule do
  @moduledoc """
  Plain `def` version: runs eagerly with regular Elixir semantics.
  """

  @doc "Adds 1 to every element of tensor `x`."
  def adds_one(x), do: Nx.add(x, 1)
end
defmodule MyModule2 do
  @moduledoc """
  Same operation as `MyModule`, but as a numerical definition (`defn`),
  which builds an expression graph that Nx compilers can JIT.
  """
  import Nx.Defn

  @doc "Adds 1 to every element of tensor `x`."
  defn adds_one(x), do: Nx.add(x, 1)
end
defmodule MyModule3 do
  @moduledoc """
  Like `MyModule2`, but prints the `defn` expression graph when invoked.
  """
  import Nx.Defn

  @doc "Adds 1 to every element of `x`, printing the expression for inspection."
  defn adds_one(x) do
    x
    |> Nx.add(1)
    |> print_expr()
  end
end
# The defn version prints its expression graph before returning a result...
MyModule3.adds_one(Nx.tensor([1,2,3]))
# ...while the plain def version simply computes eagerly.
MyModule.adds_one(Nx.tensor([1,2,3]))
$$ \mathrm{softmax}(n) = \frac{\exp(n)}{\sum \exp(n)} $$
defmodule Softmax do
  @moduledoc """
  Softmax as an Nx numerical definition (`defn`).
  """
  import Nx.Defn

  @doc """
  Computes `exp(n) / sum(exp(n))` over the whole tensor `n`.

  Subtracts `max(n)` before exponentiating — mathematically a no-op
  (the common factor cancels in the ratio), but it prevents `Nx.exp/1`
  from overflowing to infinity for large inputs. Also computes the
  exponentials once instead of twice.
  """
  defn softmax(n) do
    exps = Nx.exp(n - Nx.reduce_max(n))
    exps / Nx.sum(exps)
  end
end
# Benchmark JIT-compiled (EXLA) softmax against pure-Elixir interpretation
# on a one-million-element tensor of uniform random values.
key = Nx.Random.key(42)
{tensor, _key} = Nx.Random.uniform(key, shape: {1_000_000})
Benchee.run(
%{
"JIT with EXLA" => fn ->
apply(EXLA.jit(&Softmax.softmax/1), [tensor])
end,
"Regular Elixir" => fn ->
Softmax.softmax(tensor)
end
},
# measure each scenario for 10 seconds
time: 10
)
# From here on, compile every defn with the EXLA compiler...
Nx.Defn.global_default_options(compiler: EXLA)
# ...and allocate/execute all tensors on the EXLA backend by default.
Nx.default_backend(EXLA.Backend)