FizzBuzz
# Install notebook dependencies:
#   axon      - neural-network library built on Nx
#   nx        - numerical computing / tensors
#   exla      - XLA-backed compiler/backend for Nx
#   table_rex - table rendering (used by Axon.Display.as_table below)
#   kino      - Livebook widgets (used by Axon.Display.as_graph below)
Mix.install(
[
{:axon, "~> 0.5.1"},
{:nx, "~> 0.5.1"},
{:exla, "~> 0.5.1"},
{:table_rex, "~> 3.1"},
{:kino, "~> 0.9.0"}
],
# Run all Nx tensor operations on the EXLA backend by default.
config: [nx: [default_backend: EXLA.Backend]]
)
:ok
Section
# Feature vector for n: its remainders modulo 3, 5, and 15 (in that order).
mods = fn n -> Enum.map([3, 5, 15], fn d -> rem(n, d) end) end
#Function<42.3316493/1 in :erl_eval.expr/6>
# One-hot label for n over the four classes [fizz, buzz, fizzbuzz, other]:
# divisible by 15 -> fizzbuzz, by 3 only -> fizz, by 5 only -> buzz, else other.
fizzbuzz = fn n ->
  case {rem(n, 3), rem(n, 5)} do
    {0, 0} -> [0, 0, 1, 0]
    {0, _} -> [1, 0, 0, 0]
    {_, 0} -> [0, 1, 0, 0]
    {_, _} -> [0, 0, 0, 1]
  end
end
#Function<42.3316493/1 in :erl_eval.expr/6>
# Sanity check: 1 is divisible by neither 3 nor 5, so it is the "other" class.
fizzbuzz.(1)
[0, 0, 0, 1]
# One rank-1 feature tensor [rem 3, rem 5, rem 15] per integer 1..1000.
# NOTE(review): this binding is never consumed by the training loop below
# (the `data` stream is), and unlike `targets` it carries no leading batch
# dimension — confirm whether it is intentionally kept for inspection only.
train = Enum.map(1..1000, fn n -> Nx.tensor(mods.(n)) end)
[
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 1, 1]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 2, 2]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 3, 3]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 4, 4]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 0, 5]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 1, 6]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 2, 7]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 3, 8]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 4, 9]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 0, 10]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 1, 11]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 2, 12]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 3, 13]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 4, 14]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 0, 0]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 1, 1]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 2, 2]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 3, 3]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 4, 4]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 0, 5]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 1, 6]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 2, 7]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 3, 8]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 4, 9]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 0, 10]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 1, 11]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 2, 12]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 3, 13]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 4, 14]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 0, 0]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 1, 1]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 2, 2]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 3, 3]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 4, 4]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 0, 5]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 1, 6]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 2, 7]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 3, 8]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 4, 9]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 0, 10]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 1, 11]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 2, 12]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 3, 13]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 4, 14]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 0, 0]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, 1, 1]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[2, 2, 2]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[0, 3, ...]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[1, ...]
>,
#Nx.Tensor<
s64[3]
EXLA.Backend
[...]
>,
...
]
# A single 15x3 tensor of feature vectors for n = 1..15 (one full fizzbuzz cycle).
test = 1..15 |> Enum.map(mods) |> Nx.tensor()
#Nx.Tensor<
s64[15][3]
EXLA.Backend
[
[1, 1, 1],
[2, 2, 2],
[0, 3, 3],
[1, 4, 4],
[2, 0, 5],
[0, 1, 6],
[1, 2, 7],
[2, 3, 8],
[0, 4, 9],
[1, 0, 10],
[2, 1, 11],
[0, 2, 12],
[1, 3, 13],
[2, 4, 14],
[0, 0, 0]
]
>
# One-hot label tensors (shape {1, 4}, leading batch dim) for n = 1..1000.
# NOTE(review): like `train`, this is not consumed by the training loop —
# the `data` stream below rebuilds the same pairs lazily; confirm intent.
targets = Enum.map(1..1000, fn n -> Nx.tensor([fizzbuzz.(n)]) end)
[
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 1, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 1, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 1, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 1, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 1, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 1, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 1, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 1, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, 0, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 1, 0]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, 1]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, 0, 0, ...]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[1, 0, ...]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
[0, ...]
]
>,
#Nx.Tensor<
s64[1][4]
EXLA.Backend
[
...
]
>,
...
]
# A small MLP: 3 input features -> 10 hidden units (ReLU) -> 4-way softmax
# over the classes [fizz, buzz, fizzbuzz, other].
model =
  "fizzbuzz"
  |> Axon.input(shape: {nil, 3})
  |> Axon.dense(10, activation: :relu)
  |> Axon.dense(4, activation: :softmax)
#Axon<
inputs: %{"fizzbuzz" => {nil, 3}}
outputs: "softmax_0"
nodes: 5
>
# Print a layer/shape/parameter summary of the model, traced with a 1x3 s64 template.
Axon.Display.as_table(model, Nx.template({1, 3}, :s64)) |> IO.puts()
+------------------------------------------------------------------------------------------------------+
| Model |
+==================================+=============+==============+=================+====================+
| Layer | Input Shape | Output Shape | Options | Parameters |
+==================================+=============+==============+=================+====================+
| fizzbuzz ( input ) | [] | {1, 3} | shape: {nil, 3} | |
| | | | optional: false | |
+----------------------------------+-------------+--------------+-----------------+--------------------+
| dense_0 ( dense["fizzbuzz"] ) | [{1, 3}] | {1, 10} | | kernel: f32[3][10] |
| | | | | bias: f32[10] |
+----------------------------------+-------------+--------------+-----------------+--------------------+
| relu_0 ( relu["dense_0"] ) | [{1, 10}] | {1, 10} | | |
+----------------------------------+-------------+--------------+-----------------+--------------------+
| dense_1 ( dense["relu_0"] ) | [{1, 10}] | {1, 4} | | kernel: f32[10][4] |
| | | | | bias: f32[4] |
+----------------------------------+-------------+--------------+-----------------+--------------------+
| softmax_0 ( softmax["dense_1"] ) | [{1, 4}] | {1, 4} | | |
+----------------------------------+-------------+--------------+-----------------+--------------------+
Total Parameters: 84
Total Parameters Memory: 336 bytes
:ok
# Render the model as a graph (displayed inline by Kino in Livebook).
Axon.Display.as_graph(model, Nx.template({1, 3}, :s64))
null
# Lazy training set: {features, one-hot label} pairs for n = 1..1000.
# Both tensors carry a leading batch dimension of 1 (batch size 1).
data =
  Stream.map(1..1000, fn n ->
    {Nx.tensor([mods.(n)]), Nx.tensor([fizzbuzz.(n)])}
  end)
#Stream<[enum: 1..1000, funs: [#Function<48.124013645/1 in Stream.map/2>]]>
# Train the model and bind the resulting parameter map:
#   - categorical cross-entropy loss with AdamW (learning rate 0.005)
#   - accuracy tracked per batch
#   - 5 epochs over the 1000-sample stream, JIT-compiled via EXLA
params =
  model
  |> Axon.Loop.trainer(:categorical_cross_entropy, Axon.Optimizers.adamw(0.005))
  |> Axon.Loop.metric(:accuracy)
  |> Axon.Loop.run(data, %{}, epochs: 5, compiler: EXLA)
15:17:58.654 [debug] Forwarding options: [compiler: EXLA] to JIT compiler
Epoch: 0, Batch: 950, accuracy: 0.9158784 loss: 0.3955561
Epoch: 1, Batch: 950, accuracy: 1.0000000 loss: 0.2113254
Epoch: 2, Batch: 950, accuracy: 1.0000000 loss: 0.1426940
Epoch: 3, Batch: 950, accuracy: 1.0000000 loss: 0.1074346
Epoch: 4, Batch: 950, accuracy: 1.0000000 loss: 0.0860523
%{
"dense_0" => %{
"bias" => #Nx.Tensor<
f32[10]
EXLA.Backend
[-0.042269978672266006, 0.0, 1.9196816682815552, 0.01660083420574665, 0.7941468358039856, 1.4397428035736084, 1.5058162212371826, 0.03622683510184288, -0.10092532634735107, 0.5218181014060974]
>,
"kernel" => #Nx.Tensor<
f32[3][10]
EXLA.Backend
[
[0.16734740138053894, -0.10626289993524551, -4.037785053253174, -1.6120307445526123, -1.3427751064300537, -2.345513343811035, 0.8081691265106201, 2.50365948677063, 0.49454644322395325, -1.2651739120483398],
[0.10597733408212662, -0.21395926177501678, 0.24739858508110046, 0.27685025334358215, 1.48235285282135, 0.015700913965702057, -2.684476375579834, 0.7360167503356934, 0.3081551492214203, -0.46458956599235535],
[-0.2988255023956299, -0.569728434085846, 0.4854811131954193, 0.5067737698554993, 0.3781144917011261, 0.29420384764671326, 0.2774445712566376, 0.9394006729125977, -0.7755507230758667, 0.2194211483001709]
]
>
},
"dense_1" => %{
"bias" => #Nx.Tensor<
f32[4]
EXLA.Backend
[-0.1028248518705368, -0.7923071384429932, 0.8074134588241577, 0.22128091752529144]
>,
"kernel" => #Nx.Tensor<
f32[10][4]
EXLA.Backend
[
[-0.008159450255334377, 0.525536835193634, -0.31904321908950806, 0.08173605054616928],
[-0.479378879070282, -0.14598749577999115, 0.20385755598545074, -0.5622449517250061],
[0.5582160353660583, -0.0655285194516182, 1.2706323862075806, -0.8770636916160583],
[0.623023509979248, 0.008485288359224796, -1.0790836811065674, -0.053042806684970856],
[0.8895511627197266, -1.1578261852264404, 0.009290429763495922, 0.009255134500563145],
[0.17620179057121277, -0.010672553442418575, 0.8719890713691711, -0.583444356918335],
[-1.4218872785568237, 1.2568758726119995, 1.9537451267242432, -2.168077230453491],
[-0.6225107312202454, 0.20712487399578094, -1.844459056854248, 1.031597375869751],
[0.4422478973865509, 0.5520014762878418, 0.49499645829200745, 0.28010842204093933],
[0.5368578433990479, 0.029110711067914963, 0.8334208726882935, -0.4715330898761749]
]
>
}
}
# Compile the model into {init_fn, predict_fn}; only the prediction fn is used below.
{_init_fn, predict_fn} = Axon.build(model)
{#Function<135.4924062/2 in Nx.Defn.Compiler.fun/2>,
#Function<135.4924062/2 in Nx.Defn.Compiler.fun/2>}
# Classify an integer with the trained network: run its mod-features through
# predict_fn and map the argmax class index (always 0..3) to its label.
guess = fn x ->
  input = Nx.tensor([mods.(x)])
  [class] = predict_fn.(params, input) |> Nx.argmax() |> Nx.to_flat_list()
  elem({"fizz", "buzz", "fizzbuzz", "other"}, class)
end
#Function<42.3316493/1 in :erl_eval.expr/6>
# Spot-check the trained model against known answers (3 -> fizz, 5 -> buzz,
# 15 -> fizzbuzz, 16 -> other) plus one large multiple of 5.
guess.(3) |> IO.inspect(label: "3")
guess.(5) |> IO.inspect(label: "5")
guess.(15) |> IO.inspect(label: "15")
guess.(16) |> IO.inspect(label: "16")
guess.(15_432_115) |> IO.inspect(label: "15,432,115")
3: "fizz"
5: "buzz"
15: "fizzbuzz"
16: "other"
15,432,115: "buzz"
"buzz"