FFix intro: one clip, a few useful edits
What we’ll build
Let’s take one pleasant sample video and use it as a small editing workbench.
We will build up gradually:
- load and preview a source clip
- build the smallest useful `FFix.command/3`
- add filters one at a time
- add centered title text
- save a still image
- make a phone-friendly version
- branch one graph into a few outputs
- observe runner events and progress
- finish with a small visual ffmpeg easter egg
The point is not to hide ffmpeg. The point is to keep ffmpeg concepts visible while making commands and filtergraphs easier to compose from Elixir.
Setup
This notebook uses:
- `FFix` from GitHub
- `Kino` for inline previews and controls
- `Req` to download a small CC0 sample clip
# Install notebook dependencies. `consolidate_protocols: false` keeps protocol
# consolidation off so libraries such as Kino can add implementations at runtime.
deps = [
  {:ffix, github: "akash-akya/ffix"},
  {:kino, "~> 0.19"},
  {:req, "~> 0.5"}
]

Mix.install(deps, consolidate_protocols: false)
import FFix
import FFix.Filter

# Fail fast: every cell below shells out to ffmpeg, so a missing binary should
# stop the notebook here with a clear message instead of failing mid-tutorial.
# (`unless` is soft-deprecated; `||` with `raise` is the idiomatic guard.)
System.find_executable("ffmpeg") ||
  raise "ffmpeg must be available on PATH to run this notebook"

# A unique scratch directory per run keeps outputs from colliding with
# earlier runs on the same machine.
work_dir = Path.join(System.tmp_dir!(), "ffix-edit-workbench-#{System.unique_integer([:positive])}")
File.mkdir_p!(work_dir)
Kino.Markdown.new("Working in `#{work_dir}`")
defmodule FFixIntro do
  @moduledoc false
  # Helpers shared by the notebook cells: running commands with stderr
  # collected, rendering media inline, and summarizing runner events.

  # Run a command, keeping stderr by default. ffmpeg explains most failures
  # there, and FFix can parse ffmpeg-style log/progress lines from stderr
  # for runner events.
  def run!(command, options \\ []) do
    FFix.run!(command, Keyword.merge([stderr: :collect], options))
  end

  # Inline MP4 preview from a file on disk.
  def video_file(path), do: path |> File.read!() |> Kino.Video.new(:mp4)

  # Inline MP4 preview from in-memory bytes (e.g. collected stdout).
  def video_bytes(bytes), do: Kino.Video.new(bytes, :mp4)

  # Inline JPEG preview from a file on disk.
  def image_file(path), do: path |> File.read!() |> Kino.Image.new(:jpeg)

  # Lazy download button; the file is only read when the user clicks.
  def download(path) do
    Kino.Download.new(fn -> File.read!(path) end, filename: Path.basename(path))
  end

  # Render the shell form of the command in a fenced block.
  def command_card(command) do
    fenced("Command FFix will run", FFix.to_shell_string(command))
  end

  # Callback inputs use internal refs until command serialization. Reading the
  # graph back from argv shows the resolved ffmpeg labels, such as [0:v].
  def graph_card(%FFix.Command{} = command) do
    graph = command |> FFix.to_argv() |> filter_complex_arg()
    fenced("Filtergraph generated by FFix", graph)
  end

  # Turn runner events into a data table, dropping event kinds we do not show.
  def event_table(events) do
    events
    |> Enum.flat_map(&event_rows/1)
    |> Kino.DataTable.new()
  end

  # Clamp `value` into `min..max`, falling back to `default` for non-numbers.
  def int(value, default, min, max) do
    coerced = if is_number(value), do: round(value), else: default
    coerced |> Kernel.max(min) |> Kernel.min(max)
  end

  # Trimmed string form of `value`, or `default` when it trims to "".
  def text(value, default) do
    case value |> to_string() |> String.trim() do
      "" -> default
      trimmed -> trimmed
    end
  end

  defp event_rows({:start, %{argv: argv}}),
    do: [%{event: "start", detail: Enum.join(Enum.take(argv, 4), " "), frame: nil, speed: nil}]

  defp event_rows({:progress, progress}),
    do: [
      %{
        event: "progress",
        detail: inspect(progress.status),
        frame: progress.frame,
        speed: progress.speed
      }
    ]

  defp event_rows({:log, log}),
    do: [%{event: "log", detail: "#{log.level}: #{log.message}", frame: nil, speed: nil}]

  defp event_rows({:exit, result}),
    do: [%{event: "exit", detail: "status #{inspect(result.exit_status)}", frame: nil, speed: nil}]

  defp event_rows(_other), do: []

  # Walk argv looking for `-filter_complex`; return its graph argument.
  defp filter_complex_arg(["-filter_complex", graph | _rest]), do: graph
  defp filter_complex_arg([_arg | rest]), do: filter_complex_arg(rest)
  defp filter_complex_arg([]), do: "(no -filter_complex; this command maps streams directly)"

  # Markdown card with a titled fenced text block.
  defp fenced(title, body) do
    fence = String.duplicate("`", 3)

    Kino.Markdown.new("""
    #{title}:
    #{fence}text
    #{body}
    #{fence}
    """)
  end
end
1. Load the source clip
We use one fixed CC0 sample video so the notebook has a consistent story.
This is an intentional hard dependency for the tutorial: if the download fails, fix the network/sample URL and re-run this cell.
source_url = "https://interactive-examples.mdn.mozilla.net/media/cc0-videos/flower.mp4"
source_path = Path.join(work_dir, "source.mp4")

# The tutorial intentionally depends on this small remote MP4.
# One fixed clip keeps every following output easy to compare.
response = Req.get!(source_url, receive_timeout: 15_000)

# `unless` is soft-deprecated; assert the failure condition with a plain `if`.
if response.status != 200 or not is_binary(response.body) do
  raise "expected a downloadable MP4 at #{source_url}, got status #{inspect(response.status)}"
end

File.write!(source_path, response.body)

Kino.Layout.grid([
  Kino.Markdown.new("Downloaded the source clip from `#{source_url}`."),
  FFixIntro.video_file(source_path)
])
2. First command: trim a tiny preview
The main API shape is:
command(inputs, graph_callback, output_callback)
In this first version there is no real filtergraph yet:
- `input(source_path, t: "2")` declares an ffmpeg input and asks ffmpeg to read only two seconds
- the graph callback receives `src`, a command input handle
- `src[:video]` selects the input video stream
- `output(:stdout, ...)` writes a tiny MP4 to stdout, so Livebook can render it without creating a file
# First command: read two seconds of the source and stream a tiny fragmented
# MP4 to stdout so Livebook can preview it without writing a file.
preview_cmd =
  command(
    # Input options render before `-i`; this becomes roughly `-t 2 -i source.mp4`.
    input(source_path, t: "2"),
    fn src ->
      # Select the first video stream. No filters yet.
      src[:video]
    end,
    fn video ->
      # MP4 on stdout needs fragmented MP4 flags so it can stream as bytes.
      output(:stdout,
        video: video,
        f: :mp4,
        # `movflags` is a flags option; lists serialize as `frag_keyframe+empty_moov`.
        movflags: [:frag_keyframe, :empty_moov],
        # Quoted atoms/strings are useful for ffmpeg stream-specific option names.
        "c:v": :libx264,
        # yuv420p is the most broadly playable pixel format for browser previews.
        pix_fmt: :yuv420p,
        # Disable audio for this tiny byte preview.
        an: true
      )
    end,
    # `-y` lets ffmpeg overwrite outputs without prompting.
    global: [y: true]
  )

# `stdout: :collect` captures the encoded bytes so Kino can render them.
preview = FFixIntro.run!(preview_cmd, stdout: :collect)

Kino.Layout.grid([
  FFixIntro.command_card(preview_cmd),
  FFixIntro.video_bytes(preview.stdout)
])
3. Add the first filter chain
Now we start doing video work.
Filters compose naturally with the Elixir pipe operator. Here we:
- trim timestamps to start at zero
- normalize frame rate
- resize/crop to 1280×720
- output another tiny MP4 directly to Livebook
# First real filter chain: trim, reset timestamps, normalize frame rate, and
# fill a 16:9 canvas, then stream the result to stdout as before.
normalized_cmd =
  command(
    input(source_path),
    fn src ->
      src[:video]
      # `trim` cuts frames, but it keeps the original timestamps.
      |> trim(duration: "3")
      # Reset timestamps so the filtered preview starts at t=0.
      |> setpts(expr: "PTS-STARTPTS")
      # Normalize frame cadence for predictable downstream outputs.
      |> fps(fps: 30)
      # Fill a 16:9 canvas without letterboxing, then crop the overflow.
      |> scale(w: 1280, h: 720, force_original_aspect_ratio: :increase)
      |> crop(w: 1280, h: 720)
    end,
    fn video ->
      output(:stdout,
        video: video,
        f: :mp4,
        # Fragmented MP4 lets ffmpeg write MP4 to a pipe instead of seeking.
        movflags: [:frag_keyframe, :empty_moov],
        "c:v": :libx264,
        pix_fmt: :yuv420p,
        # Encoder speed/quality tradeoff; useful for quick tutorial runs.
        preset: :veryfast,
        an: true
      )
    end,
    global: [y: true]
  )

# Collect stdout so the encoded bytes can be rendered inline below.
normalized = FFixIntro.run!(normalized_cmd, stdout: :collect)

Kino.Layout.grid([
  FFixIntro.graph_card(normalized_cmd),
  FFixIntro.video_bytes(normalized.stdout)
])
4. Add centered title text
Now we add a title card. This is where FFix.expr/1 is useful: ffmpeg expressions such as (w-tw)/2 should be passed through as ffmpeg expression syntax.
We also use enable: to show the title only during part of the clip.
# Interactive controls for the title card; later cells read these via
# `Kino.Input.read/1`.
title_input = Kino.Input.text("Video title", default: "A tiny garden story")
channel_input = Kino.Input.text("Channel name", default: "FFix Studio")
duration_input = Kino.Input.number("Clip duration", default: 4, min: 2, max: 8, step: 1)

controls = [title_input, channel_input, duration_input]
Kino.Layout.grid(controls)
# Read the controls, falling back to the same defaults the inputs declare.
title = title_input |> Kino.Input.read() |> FFixIntro.text("A tiny garden story")
channel = channel_input |> Kino.Input.read() |> FFixIntro.text("FFix Studio")
duration_seconds = duration_input |> Kino.Input.read() |> FFixIntro.int(4, 2, 8)

# Many ffmpeg duration options prefer strings such as "4" or "00:00:04".
duration = Integer.to_string(duration_seconds)

# Leave a little tail without text so the title does not cover the whole clip.
enable_until = max(duration_seconds - 0.4, 1.5)
titled_path = Path.join(work_dir, "01-titled-video.mp4")

# Same normalization chain as before, plus a centered `drawtext` title and a
# generated silent audio track.
titled_cmd =
  command(
    input(source_path),
    fn src ->
      video =
        src[:video]
        |> trim(duration: duration)
        |> setpts(expr: "PTS-STARTPTS")
        |> fps(fps: 30)
        |> scale(w: 1280, h: 720, force_original_aspect_ratio: :increase)
        |> crop(w: 1280, h: 720)
        |> drawtext(
          text: title,
          # w/h are frame dimensions; tw/th are rendered text dimensions.
          x: expr("(w-tw)/2"),
          y: expr("(h-th)/2"),
          fontsize: 52,
          fontcolor: "white",
          box: true,
          boxcolor: "black@0.45",
          boxborderw: 18,
          # Timeline editing: enable this drawtext filter only while true.
          enable: expr("between(t,0.4,#{enable_until})")
        )

      # A quiet placeholder audio track keeps video players happy without adding noise.
      audio = anullsrc(channel_layout: :stereo, sample_rate: 48_000, duration: duration)

      # Returning a keyword list names the graph outputs for the output callback.
      [video: video, audio: audio]
    end,
    fn [video: video, audio: audio] ->
      output(titled_path,
        video: video,
        audio: audio,
        "c:v": :libx264,
        "c:a": :aac,
        pix_fmt: :yuv420p,
        preset: :veryfast,
        # Stop when the generated silent audio or filtered video ends.
        shortest: true
      )
    end,
    global: [y: true]
  )

FFixIntro.run!(titled_cmd)

Kino.Layout.grid([
  FFixIntro.graph_card(titled_cmd),
  FFixIntro.video_file(titled_path)
])
5. Save a still image
A still image is a useful companion to a video. We seek to one second, grab a frame, resize it, and add centered text.
# Reuse the video title as the default thumbnail text.
thumbnail_path = Path.join(work_dir, "02-thumbnail.jpg")
thumbnail_title_input = Kino.Input.text("Thumbnail title", default: title)

# Render the control as this cell's output.
thumbnail_title_input

thumbnail_title = thumbnail_title_input |> Kino.Input.read() |> FFixIntro.text(title)
# Build a JPEG thumbnail: seek to one second, grab one frame, resize, and add
# centered title/channel text.
thumbnail_cmd =
  command(
    # `ss` before `-i` asks ffmpeg to seek before decoding, which is fast.
    input(source_path, ss: "00:00:01"),
    fn src ->
      src[:video]
      |> scale(w: 1280, h: 720, force_original_aspect_ratio: :increase)
      |> crop(w: 1280, h: 720)
      |> drawtext(
        text: thumbnail_title,
        x: expr("(w-tw)/2"),
        y: expr("(h-th)/2"),
        fontsize: 64,
        fontcolor: "white",
        box: true,
        boxcolor: "black@0.5",
        boxborderw: 24
      )
      |> drawtext(
        text: channel,
        x: expr("(w-tw)/2"),
        # Place the channel name just below the centered title.
        y: expr("(h+th)/2+56"),
        fontsize: 30,
        fontcolor: "white"
      )
    end,
    fn image ->
      output(thumbnail_path,
        video: image,
        # One encoded video frame becomes one JPEG.
        "frames:v": 1,
        # Lower q:v is better JPEG quality in ffmpeg's image2 muxer.
        "q:v": 3,
        f: :image2
      )
    end,
    global: [y: true]
  )

FFixIntro.run!(thumbnail_cmd)

Kino.Layout.grid([
  FFixIntro.graph_card(thumbnail_cmd),
  FFixIntro.image_file(thumbnail_path)
])
6. Make a phone-friendly version
The same source can become a 9:16 phone-friendly clip. The only big difference is the target canvas: 1080×1920.
# Phone-friendly variant: the same pipeline as the titled clip, but on a
# vertical 1080×1920 canvas with a larger title.
phone_path = Path.join(work_dir, "03-phone-friendly.mp4")

phone_cmd =
  command(
    input(source_path),
    fn src ->
      video =
        src[:video]
        |> trim(duration: duration)
        |> setpts(expr: "PTS-STARTPTS")
        |> fps(fps: 30)
        # Vertical phone canvas. `increase` fills the frame; crop chooses the center.
        |> scale(w: 1080, h: 1920, force_original_aspect_ratio: :increase)
        |> crop(w: 1080, h: 1920)
        |> drawtext(
          text: title,
          x: expr("(w-tw)/2"),
          y: expr("(h-th)/2"),
          fontsize: 72,
          fontcolor: "white",
          box: true,
          boxcolor: "black@0.5",
          boxborderw: 24,
          enable: expr("between(t,0.4,#{enable_until})")
        )

      # Use generated silent audio so the phone-friendly clip has an audio stream but no noise.
      audio = anullsrc(channel_layout: :stereo, sample_rate: 48_000, duration: duration)

      [video: video, audio: audio]
    end,
    fn [video: video, audio: audio] ->
      output(phone_path,
        video: video,
        audio: audio,
        "c:v": :libx264,
        "c:a": :aac,
        pix_fmt: :yuv420p,
        preset: :veryfast,
        shortest: true
      )
    end,
    global: [y: true]
  )

FFixIntro.run!(phone_cmd)

Kino.Layout.grid([
  FFixIntro.graph_card(phone_cmd),
  FFixIntro.video_file(phone_path)
])
7. Branch one graph into a few outputs
Now we combine the ideas.
split/2 creates multiple branches from one filtered stream. This command writes three outputs in one ffmpeg run:
- 16:9 video
- 9:16 phone-friendly video
- review contact sheet
# Choose the tile grid for the review contact sheet; ffmpeg's `tile` filter
# accepts layouts written as "COLSxROWS", such as "2x2".
sheet_layout_input =
  Kino.Input.select("Review sheet layout", [{"2x2", "2 × 2"}, {"3x2", "3 × 2"}], default: "2x2")

sheet_layout_input

sheet_layout = Kino.Input.read(sheet_layout_input)

# One ffmpeg run will write all three of these outputs.
wide_path = Path.join(work_dir, "04-wide-video.mp4")
phone_branch_path = Path.join(work_dir, "04-phone-friendly.mp4")
sheet_path = Path.join(work_dir, "04-review-sheet.jpg")
# One ffmpeg run, three outputs: a 16:9 clip, a 9:16 clip, and a JPEG review
# contact sheet, all branched from a single normalized base stream.
branch_cmd =
  command(
    input(source_path),
    fn src ->
      # Normalize once before splitting, so each branch starts from the same clean base.
      base =
        src[:video]
        |> trim(duration: duration)
        |> setpts(expr: "PTS-STARTPTS")
        |> fps(fps: 30)

      # ffmpeg filter outputs generally need explicit fan-out before reuse.
      # `split` gives us independent branches for independent outputs.
      [wide, phone, sheet] = split(base, outputs: 3)

      wide_video =
        wide
        |> scale(w: 1280, h: 720, force_original_aspect_ratio: :increase)
        |> crop(w: 1280, h: 720)
        |> drawtext(
          text: title,
          x: expr("(w-tw)/2"),
          y: expr("(h-th)/2"),
          fontsize: 44,
          fontcolor: "white",
          box: true,
          boxcolor: "black@0.45",
          boxborderw: 16,
          enable: expr("between(t,0.4,#{enable_until})")
        )

      phone_video =
        phone
        |> scale(w: 1080, h: 1920, force_original_aspect_ratio: :increase)
        |> crop(w: 1080, h: 1920)
        |> drawtext(
          text: title,
          x: expr("(w-tw)/2"),
          y: expr("(h-th)/2"),
          fontsize: 72,
          fontcolor: "white",
          box: true,
          boxcolor: "black@0.5",
          boxborderw: 24,
          enable: expr("between(t,0.4,#{enable_until})")
        )

      review_sheet =
        sheet
        # Sample one frame per second, then tile those frames into a contact sheet.
        |> fps(fps: 1)
        # Width 180; h: -1 lets scale keep the input aspect ratio.
        |> scale(w: 180, h: -1)
        |> tile(layout: sheet_layout, margin: 8, padding: 4, color: "white")

      # Each named stream here is consumed by one output below; a silent audio
      # track is generated per video output.
      [
        wide_video: wide_video,
        phone_video: phone_video,
        review_sheet: review_sheet,
        wide_audio: anullsrc(channel_layout: :stereo, sample_rate: 48_000, duration: duration),
        phone_audio: anullsrc(channel_layout: :stereo, sample_rate: 48_000, duration: duration)
      ]
    end,
    fn outputs ->
      [
        output(wide_path,
          video: outputs[:wide_video],
          audio: outputs[:wide_audio],
          "c:v": :libx264,
          "c:a": :aac,
          pix_fmt: :yuv420p,
          preset: :veryfast,
          shortest: true
        ),
        output(phone_branch_path,
          video: outputs[:phone_video],
          audio: outputs[:phone_audio],
          "c:v": :libx264,
          "c:a": :aac,
          pix_fmt: :yuv420p,
          preset: :veryfast,
          shortest: true
        ),
        output(sheet_path,
          video: outputs[:review_sheet],
          # The sheet branch is a video stream with one frame; write it as JPEG.
          "frames:v": 1,
          "q:v": 3,
          f: :image2
        )
      ]
    end,
    global: [y: true]
  )

FFixIntro.run!(branch_cmd)

Kino.Layout.grid([
  FFixIntro.graph_card(branch_cmd),
  FFixIntro.video_file(wide_path),
  FFixIntro.video_file(phone_branch_path),
  FFixIntro.image_file(sheet_path)
])
8. Observe runner events and progress
FFix.run!/2 is convenient, but the runner can also stream events.
Here we run the multi-output command again with progress: true, then display the important events in a table.
# Re-run the multi-output command, this time consuming the runner's event
# stream instead of `run!/2`. `progress: true` adds `-progress pipe:2`; ffmpeg
# then emits machine-readable progress lines, and FFix turns them into
# `{:progress, %Progress{}}` events.
events = Enum.to_list(FFix.stream(branch_cmd, progress: true, stderr: :collect))

Kino.Layout.grid([
  FFixIntro.event_table(events),
  FFixIntro.video_file(wide_path),
  FFixIntro.image_file(sheet_path)
])
9. Tiny easter egg: a weird generated bumper
ffmpeg has wonderful source filters hiding in plain sight. Let’s make a tiny visual bumper from one of them. No input file needed.
# Pick a generated "easter egg" visual; each option maps to an ffmpeg source filter.
egg_input = Kino.Input.select("Easter egg bumper", [sierpinski: "Sierpinski", mandelbrot: "Mandelbrot", life: "Game of Life", cellauto: "Rule 110"], default: :mandelbrot)

egg_input

bumper_path = Path.join(work_dir, "05-easter-egg-bumper.mp4")

# Build the chosen source-filter stream. Note that no `input/2` is involved.
weird_source =
  case Kino.Input.read(egg_input) do
    :sierpinski ->
      # Source filters have no input pads; they synthesize media inside ffmpeg.
      sierpinski(size: "1280x720", rate: 30, seed: 42, type: :triangle)

    :mandelbrot ->
      # The default easter egg: a generated fractal zoom, still just a video stream.
      mandelbrot(size: "1280x720", rate: 30, maxiter: 1200, start_scale: "3", end_scale: "0.25")

    :life ->
      life(size: "1280x720", rate: 30, random_seed: 42, ratio: "0.22", life_color: "cyan")

    :cellauto ->
      cellauto(size: "1280x720", rate: 30, rule: 110, random_seed: 42)
  end
# Wrap the generated visual with channel text and silent audio, capped at 3s.
bumper_cmd =
  command(
    # No inputs: the graph starts from source filters only.
    [],
    fn [] ->
      video =
        weird_source
        |> drawtext(
          text: channel,
          x: expr("(w-tw)/2"),
          y: expr("(h-th)/2"),
          fontsize: 56,
          fontcolor: "white",
          box: true,
          boxcolor: "black@0.5",
          boxborderw: 20
        )

      # Add silence for container compatibility; the visual source itself has no audio.
      audio = anullsrc(channel_layout: :stereo, sample_rate: 48_000, duration: "3")

      [video: video, audio: audio]
    end,
    fn [video: video, audio: audio] ->
      output(bumper_path,
        video: video,
        audio: audio,
        "c:v": :libx264,
        "c:a": :aac,
        pix_fmt: :yuv420p,
        preset: :veryfast,
        shortest: true,
        # Hard cap on output duration; these generated sources do not end on their own.
        t: 3
      )
    end,
    global: [y: true]
  )

FFixIntro.run!(bumper_cmd)

Kino.Layout.grid([
  FFixIntro.graph_card(bumper_cmd),
  FFixIntro.video_file(bumper_path)
])
Wrap-up
You tried a small editing workflow from one source clip:
- previewed a simple command from stdout bytes
- composed a filter chain
- used centered `drawtext` and timeline `enable:`
- saved a still image
- made a phone-friendly version
- branched one graph into multiple outputs
- inspected runner progress events
- generated a playful bumper from ffmpeg source filters
The nice part: each step is ordinary Elixir data until the final execution boundary.
# Offer every artifact produced by this notebook as a download button.
[
  titled_path,
  thumbnail_path,
  phone_path,
  wide_path,
  phone_branch_path,
  sheet_path,
  bumper_path
]
|> Enum.map(&FFixIntro.download/1)
|> Kino.Layout.grid()