Powered by AppSignal & Oban Pro

Download Hailo Models

livebooks/download_models.livemd

Download Hailo Models

Mix.install([
  # HTTP client used to fetch the HEF model and the dataset YAML.
  {:req, "~> 0.5"},
  # Parses the Ultralytics COCO dataset YAML.
  {:yaml_elixir, "~> 2.10"},
  # Encodes the extracted class-name list to JSON.
  {:jason, "~> 1.4"}
])

Section

This livebook downloads a compiled model (.hef) for Hailo-8 or Hailo-10 and generates the COCO class labels JSON file into the nx_hailo priv/ directory.

Models are sourced from the Hailo Model Zoo. Pre-compiled HEF files are hosted on S3; the URLs follow the pattern:

https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ModelZoo/Compiled/&lt;version&gt;/&lt;device&gt;/&lt;model_name&gt;.hef

Supported device values include: hailo8, hailo8l, hailo8r (Hailo-8 family), and hailo10h (Hailo-10H). Browse the model zoo repo to find the version/device/model combination you need.

Configuration

# Path to the target directory for download.
# When running via Attached Node, __DIR__ resolves to the local machine path,
# so we use the DOWNLOAD_DIR env var set by scripts/start_node.sh instead.
download_dir = System.get_env("DOWNLOAD_DIR") || to_string(:code.priv_dir(:nx_hailo))

# Model Zoo version (e.g. v2.15.0). Check the repo for the latest.
version = "v5.1.0"

# Device: "hailo8" | "hailo8l" | "hailo8r" (Hailo-8 family) or "hailo10h" (Hailo-10H)
device = "hailo10h"

# Model name (no .hef). Examples: "yolov8m", "yolov8s", "yolov5m"
model_name = "yolov8m"

# Build the HEF URL from version, device, and model
# (S3 path pattern: .../ModelZoo/Compiled/<version>/<device>/<model_name>.hef).
model_hef_url =
  "https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ModelZoo/Compiled/#{version}/#{device}/#{model_name}.hef"

# COCO dataset YAML from the Ultralytics repo — provides class name mappings (for YOLO COCO models).
dataset_yaml_url =
  "https://raw.githubusercontent.com/ultralytics/ultralytics/refs/heads/main/ultralytics/cfg/datasets/coco.yaml"

# Ensure the destination directory exists before any downloads write into it
# (raises if it cannot be created).
File.mkdir_p!(download_dir)

IO.puts("Will download: #{model_name}.hef for #{device}")

Download the HEF model file

defmodule ModelDownloader do
  @moduledoc """
  Idempotent file downloader for model artifacts.

  After a successful download a sibling `.marker` file is written next to
  the destination; its presence makes subsequent calls no-ops, so the
  livebook cell can be re-evaluated safely.
  """

  @doc """
  Downloads `url` and writes the result under `dest_path`.

  When the response is a zip archive, Req decodes the body into a list of
  `{filename, contents}` entries, each of which is written into the
  directory of `dest_path`. Any other body is written verbatim to
  `dest_path`. Raises on HTTP or filesystem errors (via `Req.get!/1`
  and `File.write!/2`).
  """
  def download(url, dest_path) do
    marker = dest_path <> ".marker"

    if File.exists?(marker) do
      IO.puts("Already downloaded: #{dest_path} — skipping.")
    else
      IO.puts("Downloading #{url} ...")
      %{headers: headers, body: body} = Req.get!(url)

      if zip?(headers) do
        # Req already extracted the archive into {filename, contents} pairs.
        for {filename, contents} <- body do
          File.write!(Path.join(Path.dirname(dest_path), to_string(filename)), contents)
        end
      else
        File.write!(dest_path, body)
      end

      # Written last: a crash mid-download leaves no marker, so the next
      # run retries instead of treating a partial file as complete.
      File.write!(marker, "")
      IO.puts("Saved to #{dest_path}")
    end
  end

  # Servers may append media-type parameters (e.g. "application/zip; charset=binary"),
  # so an exact membership test on the raw header value can miss zip responses —
  # match on the media-type prefix instead. `List.wrap/1` tolerates a missing header.
  defp zip?(headers) do
    headers
    |> Map.get("content-type")
    |> List.wrap()
    |> Enum.any?(&String.starts_with?(&1, "application/zip"))
  end
end

# Fetch the compiled HEF into the target directory; skipped when the
# companion .marker file from a previous run exists.
ModelDownloader.download(model_hef_url, Path.join(download_dir, "#{model_name}.hef"))

Download and convert dataset class labels

Fetches the COCO YAML, extracts the names map, sorts by index, and writes a plain JSON array of class names that the inference pipeline expects.

# Destination for the plain JSON array of class names.
classes_json_path = Path.join(download_dir, "#{model_name}_classes.json")

if File.exists?(classes_json_path) do
  IO.puts("Already exists: #{classes_json_path} — skipping.")
else
  IO.puts("Fetching dataset YAML from #{dataset_yaml_url} ...")
  %{body: yaml_string} = Req.get!(dataset_yaml_url)

  # The YAML's "names" entry maps class index -> class name; order by
  # index so the JSON array position matches the model's class id.
  names_by_index =
    yaml_string
    |> YamlElixir.read_from_string!()
    |> Map.fetch!("names")

  class_list =
    names_by_index
    |> Enum.sort_by(&elem(&1, 0))
    |> Enum.map(&elem(&1, 1))

  File.write!(classes_json_path, Jason.encode!(class_list))
  IO.puts("Saved #{length(class_list)} classes to #{classes_json_path}")
end