Powered by AppSignal & Oban Pro

Boombox basic examples

examples/basic.livemd

Boombox basic examples

# Keep log output at :info so debug noise doesn't flood the livebook.
Logger.configure(level: :info)

# Prepend Homebrew's bin dir so ffmpeg/ffplay resolve on Mac Livebook Desktop.
homebrew_bin = "/opt/homebrew/bin"
System.put_env("PATH", homebrew_bin <> ":" <> "#{System.get_env("PATH")}")

# In case of problems installing Nx/EXLA/Bumblebee,
# you can remove them and the Nx backend config below.
# Examples that don't mention them should still work.

# MIX_INSTALL_CONFIG_BEGIN
# Use the published master branch by default; a developer-machine-specific
# absolute path must never be committed here, since it breaks the livebook
# for everyone else. For local development, point at your own checkout:
# boombox = {:boombox, path: "/path/to/your/boombox"}
boombox = {:boombox, github: "membraneframework/boombox"}

# This livebook uses boombox from the master branch. If any examples happen to not work, the latest stable version of this livebook
# can be found on https://hexdocs.pm/boombox/basic.html or in the latest github release.
# MIX_INSTALL_CONFIG_END

Mix.install([
  boombox,
  :kino,
  :nx,
  :exla,
  :bumblebee,
  :websockex,
  :membrane_simple_rtsp_server,
  {:coerce, ">= 1.0.2"}
])

# Run Nx computations on the EXLA (XLA-compiled) backend by default.
Nx.global_default_backend(EXLA.Backend)

# Directory layout for example assets; also served over HTTP below.
data_dir = "/tmp/boombox_examples_data"
input_dir = Path.join(data_dir, "input")
out_dir = Path.join(data_dir, "output")
Enum.each([input_dir, out_dir], &File.mkdir_p!/1)

# Start the :inets application, tolerating the case where a dependency
# already started it; anything else is unexpected and should crash loudly.
case :inets.start() do
  :ok ->
    :ok

  {:error, {:already_started, :inets}} ->
    :ok

  other ->
    raise "Unexpected value returned by :inets.start/0: #{inspect(other)}"
end

# Static-file HTTP server on http://localhost:1234 serving data_dir.
httpd_opts = [
  bind_address: ~c"localhost",
  port: 1234,
  document_root: ~c"#{data_dir}",
  server_name: ~c"assets_server",
  server_root: ~c"/tmp",
  erl_script_nocache: true
]

case :inets.start(:httpd, httpd_opts) do
  {:ok, _pid} -> :ok
  # port already in use — server likely started from another livebook
  {:error, _reason} -> :ok
end

Setup

👋 Here are some basic examples of using Boombox, covering file I/O, format transcoding, and simple stream round-trips. Some of them use ffmpeg to generate a stream. Some use ffplay to playback generated videos, but you can use any other player, for example VLC.

The cell below downloads assets to be used in the examples. The setup cell started an HTTP server on port 1234 that will serve static HTML files for sending/receiving the stream in the browser.

samples_url = "https://raw.githubusercontent.com/membraneframework/static/gh-pages/samples"

# Download the sample media files, skipping any that are already on disk.
for {filename, remote} <- [
      {"bun.mp4", "big-buck-bunny/bun33s.mp4"},
      {"ffmpeg-testsrc.h264", "ffmpeg-testsrc.h264"},
      {"test-audio.aac", "test-audio.aac"}
    ],
    # Fix: interpolate the local filename (was a garbled `#(unknown)`
    # placeholder, which collapsed every sample into one bogus path).
    path = "#{input_dir}/#{filename}",
    not File.exists?(path) do
  %{status: 200, body: data} = Req.get!("#{samples_url}/#{remote}")
  File.write!(path, data)
end

assets_url =
  "https://raw.githubusercontent.com/membraneframework/boombox/master/examples/data"

# Download the HTML pages used by the WebRTC/WHIP browser examples.
for asset <- ["webrtc_from_browser", "webrtc_to_browser", "whip"],
    path = "#{data_dir}/#{asset}.html",
    not File.exists?(path) do
  %{status: 200, body: data} = Req.get!("#{assets_url}/#{asset}.html")
  File.write!(path, data)
end

Play MP4 file

# Decode the sample MP4 and render it in Boombox's built-in player.
Boombox.play("#{input_dir}/bun.mp4")
# or, alternatively, Boombox.run(input: "#{input_dir}/bun.mp4", output: :player)

Transcoding H264 to VP9 within an IVF Container

# Read a raw H264 elementary stream and write VP9 in an IVF container;
# Boombox picks the transcode from the input/output formats. Preview with ffplay.
ivf_path = "#{out_dir}/ffmpeg-testsrc.ivf"

Boombox.run(
  input: {:h264, "#{input_dir}/ffmpeg-testsrc.h264"},
  output: ivf_path
)

System.shell("ffplay #{ivf_path}")

Transcode AAC to Opus within an OGG container

# Transcode the AAC sample to Opus inside an Ogg container, then play it back.
ogg_path = "#{out_dir}/test-audio.ogg"
Boombox.run(input: "#{input_dir}/test-audio.aac", output: ogg_path)
System.shell("ffplay #{ogg_path}")

WebRTC proxy

Visit http://localhost:1234/webrtc_from_browser.html to send the stream and http://localhost:1234/webrtc_to_browser.html to receive it

# Relay: accept WebRTC signaled on ws://localhost:8829 and re-send the
# stream over WebRTC signaled on ws://localhost:8830.
Boombox.run(input: {:webrtc, "ws://localhost:8829"}, output: {:webrtc, "ws://localhost:8830"})

Record WebRTC via WHIP to MP4

To send the stream, visit http://localhost:1234/whip.html.

Note: to finish the recording, don’t stop this cell — click ‘disconnect’ or close the browser tab instead, so that the recording is finalized properly.

# Accept a token-protected WHIP ingest and record it to an MP4 file,
# then preview the recording with ffplay.
recording_path = "#{out_dir}/webrtc_to_mp4.mp4"

Boombox.run(
  input: {:whip, "http://localhost:8829", token: "whip_it!"},
  output: recording_path
)

System.shell("ffplay #{recording_path}")

Record RTMP to MP4

# Record an RTMP stream to MP4: Boombox listens in a background task while
# ffmpeg streams the sample file to the same URI in real time.
uri = "rtmp://localhost:5432"
mp4_path = "#{out_dir}/rtmp_to_mp4.mp4"

recorder =
  Task.async(fn ->
    Boombox.run(input: uri, output: mp4_path)
  end)

# -re paces the input at native rate; -c copy remuxes to FLV without
# re-encoding. The match asserts ffmpeg exited cleanly (status 0).
{_output, 0} = System.shell("ffmpeg -re -i #{input_dir}/bun.mp4 -c copy -f flv #{uri}")

Task.await(recorder)
System.shell("ffplay #{mp4_path}")

Stream MP4 via WebRTC, receive it and record to MP4 again

# Round-trip: send the MP4 over WebRTC from a background task and receive
# it in the foreground, writing the stream back out to a new MP4 file.
# Both ends share one in-process signaling channel instead of a WebSocket.
signaling = Membrane.WebRTC.Signaling.new()
roundtrip_path = "#{out_dir}/mp4_webrtc_mp4.mp4"

sender =
  Task.async(fn ->
    Boombox.run(input: "#{input_dir}/bun.mp4", output: {:webrtc, signaling})
  end)

Boombox.run(input: {:webrtc, signaling}, output: roundtrip_path)

Task.await(sender)
System.shell("ffplay #{roundtrip_path}")