Super resolution
Mix.install([
  {:tflite_elixir, "~> 0.3.0"},
  {:evision, "0.1.31"},
  {:req, "~> 0.3.0"},
  {:kino, "~> 0.9.0"}
])
Introduction
The task of recovering a high resolution (HR) image from its low resolution counterpart is commonly referred to as Single Image Super Resolution (SISR).
The model used here is the Enhanced Super-Resolution Generative Adversarial Network (ESRGAN), and we will use TensorFlow Lite to run inference on the pretrained model.
https://www.tensorflow.org/lite/examples/super_resolution/overview
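The TensorFlow Lite example linked above describes the pretrained ESRGAN model as upscaling a 50x50 low-resolution RGB image to a 200x200 high-resolution output (a 4x factor). If you plan to feed your own image later in this notebook, it should match that input size. The snippet below is a minimal sketch of such a resize step; Evision.resize/2 and the {50, 50} target are assumptions based on the linked example and are not needed if you stick to the bundled test image.
# Sketch only: force an arbitrary image to the 50x50 input size the pretrained
# ESRGAN model is assumed to expect (see the example linked above).
resize_to_model_input = fn img ->
  Evision.resize(img, {50, 50})
end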
It’s useful to alias a module to something shorter when we make extensive use of its functions.
alias Evision, as: Cv
alias TFLiteElixir, as: TFLite
alias TFLiteElixir.TFLiteTensor
Download data files
- the ESRGAN-TF2 model in TensorFlow Lite format
- a low-resolution test image from the TensorFlow examples repository
# /data is the writable portion of a Nerves system
downloads_dir =
  if Code.ensure_loaded?(Nerves.Runtime), do: "/data/livebook", else: System.tmp_dir!()
download = fn url ->
  save_as = Path.join(downloads_dir, URI.encode_www_form(url))
  unless File.exists?(save_as), do: Req.get!(url, output: save_as)
  save_as
end
data_files =
  [
    model: "https://tfhub.dev/captain-pool/lite-model/esrgan-tf2/1?lite-format=tflite",
    test_img:
      "https://raw.githubusercontent.com/tensorflow/examples/master/lite/examples/super_resolution/android/app/src/main/assets/lr-1.jpg"
  ]
  |> Enum.map(fn {key, url} -> {key, download.(url)} end)
  |> Map.new()
data_files
|> Enum.map(fn {k, v} -> [name: k, location: v] end)
|> Kino.DataTable.new(name: "Data files")
test_image_input = Kino.Input.image(" ")
Generate a super resolution image using TensorFlow Lite
test_image_mat =
  case Kino.Input.read(test_image_input) do
    # An uploaded image arrives as raw RGB pixel data plus its dimensions;
    # build an Evision matrix and swap the channel order for OpenCV
    %{data: data, height: height, width: width} ->
      Cv.Mat.from_binary(data, {:u, 8}, height, width, 3)
      |> Cv.cvtColor(Cv.Constant.cv_COLOR_BGR2RGB())

    # No upload: fall back to the downloaded test image
    nil ->
      Cv.imread(data_files.test_img)
  end
lr =
  test_image_mat
  |> Cv.Mat.to_nx()
  |> Nx.new_axis(0)
  |> Nx.as_type({:f, 32})
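# Sanity check (not in the original notebook): `lr` should now be a
# {1, height, width, 3} tensor of type {:f, 32}, i.e. the NHWC layout and
# dtype the TFLite model consumes.
IO.inspect(Nx.shape(lr), label: "lr shape")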
# Load TFLite model and allocate tensors.
{:ok, interpreter} = TFLite.Interpreter.new(data_files.model)
# Get input and output tensors.
{:ok, input_tensors} = TFLite.Interpreter.inputs(interpreter)
{:ok, output_tensors} = TFLite.Interpreter.outputs(interpreter)
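# Optional check (assumption: the pretrained ESRGAN model reports a fixed
# [1, 50, 50, 3] input shape and [1, 200, 200, 3] output shape):
TFLite.Interpreter.tensor(interpreter, Enum.at(input_tensors, 0))
|> TFLite.TFLiteTensor.dims()
|> IO.inspect(label: "input dims")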
# Run the model
TFLite.Interpreter.input_tensor(interpreter, 0, Nx.to_binary(lr))
TFLite.Interpreter.invoke(interpreter)
# Extract the output and postprocess it
{:ok, output_data} = TFLite.Interpreter.output_tensor(interpreter, 0)
%TFLiteTensor{} = out_tensor = TFLite.Interpreter.tensor(interpreter, Enum.at(output_tensors, 0))
[1 | shape] = TFLite.TFLiteTensor.dims(out_tensor)
type = TFLite.TFLiteTensor.type(out_tensor)
sr =
  output_data
  |> Nx.from_binary(type)
  |> Nx.reshape(List.to_tuple(shape))
  |> Nx.clip(0, 255)
  |> Nx.as_type({:u, 8})
Visualize the result
[
  ["Low resolution", test_image_mat],
  ["High resolution", Cv.Mat.from_nx_2d(sr)]
]
|> Enum.map(fn [label, img] ->
  Kino.Layout.grid([img, Kino.Markdown.new("**#{label}**")], boxed: true)
end)
|> Kino.Layout.grid(columns: 2)
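To keep the upscaled image around, you can write it back to disk. This is a sketch rather than part of the original notebook; the use of Cv.imwrite/2 and the output file name are assumptions.
# Sketch only: persist the upscaled image next to the downloaded files.
Cv.imwrite(Path.join(downloads_dir, "sr-1.png"), Cv.Mat.from_nx_2d(sr))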