Chapter 6 notebook

chapt6.livemd

Mix.install([
  # {:benchee, "~> 1.3"},
  # {:explorer, "~> 0.9.1"},
  # {:stb_image, "~> 0.6.9"},
  {:axon, "~> 0.6.1"},
  {:bumblebee, "~> 0.5.3"},
  {:exla, "~> 0.7.3"},
  {:kino, "~> 0.13.2"},
  {:kino_vega_lite, "~> 0.1.13"},
  {:nx, "~> 0.7.3"},
  {:scholar, "~> 0.3.1"},
  {:scidata, "~> 0.1.11"},
  {:table_rex, "~> 3.1"},
  {:tucan, "~> 0.3.0"},
  {:vega_lite, "~> 0.1.9"}
],
  config: [
    nx: [
      default_backend: EXLA.Backend,
      default_defn_options: [compiler: EXLA]
    ]
  ]
)

Neural Network with just Nx

defmodule NeuralNetwork do
  import Nx.Defn

  # Forward pass: input -> hidden layer -> output layer.
  defn predict(input, w1, b1, w2, b2) do
    input
    |> hidden(w1, b1)
    |> output(w2, b2)
  end

  defn hidden(input, weight, bias) do
    input
    |> dense(weight, bias)
    |> activation()
  end

  # The output layer mirrors the hidden layer: dense transform plus sigmoid activation.
  defn output(input, weight, bias) do
    input
    |> dense(weight, bias)
    |> activation()
  end
  
  # Affine transform: Nx.dot(input, weight) + bias.
  defn dense(input, weight, bias) do
    input
    |> Nx.dot(weight)
    |> Nx.add(bias)
  end

  # Sigmoid squashes each activation into (0, 1).
  defn activation(input) do
    Nx.sigmoid(input)
  end
end
# Scalar parameters and a scalar input: a dimensional smoke test, not a real model.
key = Nx.Random.key(42)
{w1, new_key} = Nx.Random.uniform(key)
{b1, new_key} = Nx.Random.uniform(new_key)
{w2, new_key} = Nx.Random.uniform(new_key)
{b2, new_key} = Nx.Random.uniform(new_key)
Nx.Random.uniform_split(new_key, Nx.Constants.smallest_positive_normal({:f, 32}), 1, shape: {})
|> NeuralNetwork.predict(w1, b1, w2, b2)
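
The scalar run above is only a smoke test. For a shape-realistic sketch, the parameters become matrices; the sizes below are borrowed from the MNIST model later in the notebook, and plain uniform initialization is an illustrative assumption, not a recommended scheme:

# Hypothetical matrix-shaped parameters: 784 inputs -> 128 hidden units -> 10 outputs.
key = Nx.Random.key(42)
{w1, key} = Nx.Random.uniform(key, shape: {784, 128})
{b1, key} = Nx.Random.uniform(key, shape: {128})
{w2, key} = Nx.Random.uniform(key, shape: {128, 10})
{b2, key} = Nx.Random.uniform(key, shape: {10})
{input, _key} = Nx.Random.uniform(key, shape: {1, 784})

NeuralNetwork.predict(input, w1, b1, w2, b2) # returns a {1, 10} tensor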

Neural Network with Axon

# Download MNIST: images and labels each arrive as {binary, type, shape}.
{images, labels} = Scidata.MNIST.download()
{image_data, image_type, image_shape} = images
{label_data, label_type, label_shape} = labels

# Normalize pixels to [0, 1] and flatten each 28×28 image into a 784-vector.
images = image_data
|> Nx.from_binary(image_type)
|> Nx.divide(255)
|> Nx.reshape({60_000, :auto})

# One-hot encode the labels by comparing each digit against 0..9.
labels = label_data
|> Nx.from_binary(label_type)
|> Nx.reshape(label_shape)
|> Nx.new_axis(-1)
|> Nx.equal(Nx.iota({1, 10}))
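
The new_axis + equal(iota) combination is a one-hot trick: each label is broadcast against the row vector of digits 0..9, producing a 1 only where they match. A minimal illustration with labels 2 and 0:

Nx.tensor([2, 0])
|> Nx.new_axis(-1)            # shape {2, 1}
|> Nx.equal(Nx.iota({1, 10})) # broadcasts to {2, 10}; rows one-hot at index 2 and 0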
# Hold out the last 10_000 examples for testing.
train_range = 0..49_999//1
test_range = 50_000..-1//1

train_images = images[train_range]
train_labels = labels[train_range]

test_images = images[test_range]
test_labels = labels[test_range]
batch_size = 64

train_data = train_images
|> Nx.to_batched(batch_size)
|> IO.inspect(label: :train_batch)
|> Stream.zip(Nx.to_batched(train_labels, batch_size))

test_data = test_images
|> Nx.to_batched(batch_size)
|> Stream.zip(Nx.to_batched(test_labels, batch_size))
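
Each element of train_data is an {images, labels} pair with shapes {64, 784} and {64, 10}. A quick peek at the first batch (this materializes it):

{batch_images, batch_labels} = Enum.at(train_data, 0)
{Nx.shape(batch_images), Nx.shape(batch_labels)} #=> {{64, 784}, {64, 10}}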
# 784 input features -> 128 ReLU units -> 10 softmax class probabilities.
model = "images"
|> Axon.input(shape: {nil, 784})
|> Axon.dense(128, activation: :relu)
|> Axon.dense(10, activation: :softmax)
template = Nx.template({1, 784}, :f32)
Axon.Display.as_table(model, template) |> IO.puts()

Why {1, 784}, and where does the magic number 784 come from? Each MNIST image is 28×28 pixels, and flattening it gives a vector of 28 * 28 = 784 values. It has nothing to do with the batch size of 64: the leading dimension (nil in the model, 1 in the template) is the batch axis, so the template describes a single flattened image. The {50_000, 10} shape seen earlier belongs to the one-hot labels, not the images.
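
A quick shape check on the tensors built above confirms this:

Nx.shape(train_images) #=> {50000, 784}
Nx.shape(train_labels) #=> {50000, 10}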

train_batch: #Stream<[enum: 0..781, funs: [#Function<50.38948127/1 in Stream.map/2>]]>

So train_data yields batches 0..781, i.e. 782 batches of 64 images each, drawn from the {50_000, 784} training images and zipped with {64, 10} label batches.

50_000 / 64 = 781.25, which rounds up to 782 because Nx.to_batched defaults to leftover: :repeat and pads the final partial batch.
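
A quick sanity check on the batch count (note: this consumes the stream once):

Enum.count(train_data) #=> 782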
Axon.Display.as_graph(model, template)
# Simple Supervised Training Loop
# Loss|Cost function = :categorical_cross_entropy
# Optimization = :sgd | Stochastic Gradient Descent
# Performance measure = :accuracy

trained_model_state = model
|> Axon.Loop.trainer(:categorical_cross_entropy, :sgd)
|> Axon.Loop.metric(:accuracy)
|> Axon.Loop.run(train_data, %{}, epochs: 128, compiler: EXLA)
# Evaluate accuracy on the held-out test data.
model
|> Axon.Loop.evaluator()
|> Axon.Loop.metric(:accuracy)
|> Axon.Loop.run(test_data, trained_model_state, compiler: EXLA)
{test_batch, _} = Enum.at(test_data, 0)
test_image = test_batch[0]

test_image
|> Nx.reshape({28, 28})
|> Nx.to_heatmap()
# Build a JIT-compiled prediction function from the model.
{_, predict_fn} = Axon.build(model, compiler: EXLA)

probabilities = test_image
|> Nx.new_axis(0)
|> then(&predict_fn.(trained_model_state, &1))
Nx.argmax(probabilities)
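
To compare the prediction with the ground truth, the one-hot label for the same image can be decoded the same way (a sketch; this re-reads the first test batch):

{_, label_batch} = Enum.at(test_data, 0)
Nx.argmax(label_batch[0])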