Weekly Delivery Report
Purpose
This notebook fetches YouTrack issues for last week (configurable), builds two windows, and emits structured outputs designed to be fed to LLMs for product/engineering leadership reporting.
Windows generated:
- Last working day report
- Last week report
Highlights included per issue:
- State transitions in the window
- Checklist signal (`- [ ]` / `- [x]` in description)
- Description changes with compact before/after excerpts
- Special tags (`on hold`, `blocked`, `to be specified` by default)
- Comments posted in the window
- Cycle time starts at first transition from inactivity (`todo`/no state) to an active state
- Net active time excludes hold/blocked periods
Setup
Mix.install([
{:youtrack, path: "./youtrack"},
{:kino, "~> 0.14"},
{:jason, "~> 1.4"}
])
alias Youtrack.{Client, WeeklyReport, Workstreams, WorkstreamsLoader}
Inputs
# Default date window: the previous calendar week, plus the most recent
# working day relative to today (all in UTC).
today = Date.utc_today()
this_week_start = Date.beginning_of_week(today, :monday)
last_week_start_default = Date.add(this_week_start, -7)
last_week_end_default = Date.add(this_week_start, -1)

# Step back to the previous weekday: Monday skips the whole weekend,
# Sunday lands on Friday, any other day just goes back one day
# (Saturday - 1 is already Friday).
offset_to_previous_weekday =
  case Date.day_of_week(today) do
    1 -> -3
    7 -> -2
    _ -> -1
  end

last_working_day_default = Date.add(today, offset_to_previous_weekday)
# ---------------------------------------------------------------------------
# Report configuration inputs. Every input falls back to an environment
# variable so the notebook can run unattended; edit the widgets to override.
# ---------------------------------------------------------------------------

# Connection: instance URL, permanent token, and the base issue query.
base_url_in =
Kino.Input.text(
"YouTrack base URL",
default: System.get_env("YOUTRACK_BASE_URL") || "https://your-instance.youtrack.cloud"
)
token_in =
Kino.Input.password(
"Permanent token",
default: System.get_env("YOUTRACK_TOKEN") || ""
)
base_query_in =
Kino.Input.text(
"YouTrack base query (e.g. project: MYPROJECT)",
default: System.get_env("YOUTRACK_BASE_QUERY") || "project: MYPROJECT"
)
# Report windows (inclusive ISO dates), prefilled from the computed defaults.
week_start_in =
Kino.Input.text(
"Week start date (ISO, inclusive)",
default: System.get_env("YOUTRACK_REPORT_WEEK_START") || Date.to_iso8601(last_week_start_default)
)
week_end_in =
Kino.Input.text(
"Week end date (ISO, inclusive)",
default: System.get_env("YOUTRACK_REPORT_WEEK_END") || Date.to_iso8601(last_week_end_default)
)
last_working_day_in =
Kino.Input.text(
"Last working day date (ISO)",
default: System.get_env("YOUTRACK_REPORT_LAST_WORKING_DAY") || Date.to_iso8601(last_working_day_default)
)
# Names of the custom fields read for issue state and assignees.
state_field_in =
Kino.Input.text(
"State field name",
default: System.get_env("YOUTRACK_STATE_FIELD") || "State"
)
assignees_field_in =
Kino.Input.text(
"Assignees field name",
default: System.get_env("YOUTRACK_ASSIGNEES_FIELD") || "Assignee"
)
# Comma-separated state buckets: active, inactive, and final states.
in_progress_names_in =
Kino.Input.text(
"In Progress states (comma-separated)",
default: System.get_env("YOUTRACK_IN_PROGRESS") || "In Progress"
)
inactive_names_in =
Kino.Input.text(
"Inactive states (comma-separated)",
default: System.get_env("YOUTRACK_REPORT_INACTIVE_STATES") || "To Do, Todo"
)
done_names_in =
Kino.Input.text(
"Final states (comma-separated)",
default: System.get_env("YOUTRACK_REPORT_DONE") || "Done, Won't Do"
)
# Tag lists: tags highlighted per issue, and hold tags whose periods are
# excluded from net active time.
special_tags_in =
Kino.Input.text(
"Special tags to highlight (comma-separated)",
default: System.get_env("YOUTRACK_REPORT_SPECIAL_TAGS") || "on hold, blocked, to be specified"
)
hold_tags_in =
Kino.Input.text(
"Hold tags excluded from net active time (comma-separated)",
default: System.get_env("YOUTRACK_REPORT_HOLD_TAGS") || "on hold, blocked"
)
# Optional issue-ID prefix filter and workstream expansion toggle.
project_prefix_in =
Kino.Input.text(
"Filter by issue ID prefix (optional)",
default: System.get_env("YOUTRACK_PROJECT_PREFIX") || ""
)
include_substreams_in =
Kino.Input.checkbox(
"Include substreams (expand to parents)",
default: true
)
# Activity categories requested from the YouTrack activities API.
activities_categories_in =
Kino.Input.text(
"Activities categories",
default:
System.get_env("YOUTRACK_REPORT_ACTIVITY_CATEGORIES") ||
"CustomFieldCategory,TagsCategory,DescriptionCategory"
)
# Render all inputs in a two-column grid.
Kino.Layout.grid(
[
base_url_in,
token_in,
base_query_in,
week_start_in,
week_end_in,
last_working_day_in,
state_field_in,
assignees_field_in,
in_progress_names_in,
inactive_names_in,
done_names_in,
special_tags_in,
hold_tags_in,
project_prefix_in,
include_substreams_in,
activities_categories_in
],
columns: 2
)
Load Workstreams + Fetch Issues
# ---------------------------------------------------------------------------
# Read all configuration inputs, load workstream rules, and fetch the issues
# updated inside the selected week window.
# ---------------------------------------------------------------------------
base_url = Kino.Input.read(base_url_in) |> String.trim()
token = Kino.Input.read(token_in) |> String.trim()
base_query = Kino.Input.read(base_query_in) |> String.trim()
state_field = Kino.Input.read(state_field_in) |> String.trim()
assignees_field = Kino.Input.read(assignees_field_in) |> String.trim()
project_prefix = Kino.Input.read(project_prefix_in) |> String.trim()
include_substreams? = Kino.Input.read(include_substreams_in)
# Date inputs must be valid ISO-8601; from_iso8601! crashes early otherwise.
week_start = Kino.Input.read(week_start_in) |> Date.from_iso8601!()
week_end = Kino.Input.read(week_end_in) |> Date.from_iso8601!()
last_working_day = Kino.Input.read(last_working_day_in) |> Date.from_iso8601!()
# Comma-separated inputs -> trimmed string lists.
in_progress_names =
Kino.Input.read(in_progress_names_in)
|> String.split(",", trim: true)
|> Enum.map(&String.trim/1)
inactive_names =
Kino.Input.read(inactive_names_in)
|> String.split(",", trim: true)
|> Enum.map(&String.trim/1)
done_names =
Kino.Input.read(done_names_in)
|> String.split(",", trim: true)
|> Enum.map(&String.trim/1)
special_tags =
Kino.Input.read(special_tags_in)
|> String.split(",", trim: true)
|> Enum.map(&String.trim/1)
hold_tags =
Kino.Input.read(hold_tags_in)
|> String.split(",", trim: true)
|> Enum.map(&String.trim/1)
activities_categories = Kino.Input.read(activities_categories_in) |> String.trim()
# Workstream rules come from a workstreams.yaml found on default paths;
# an empty rule set is used when no file exists.
{workstream_rules, workstreams_path} = WorkstreamsLoader.load_from_default_paths()
if workstreams_path do
Kino.render(Kino.Markdown.new("Loading workstreams from `#{workstreams_path}`"))
else
Kino.render(Kino.Markdown.new("No workstreams.yaml found, using empty config"))
end
# Restrict the base query to issues updated within the week window
# (inclusive on both ends).
query = "#{base_query} updated: #{Date.to_iso8601(week_start)} .. #{Date.to_iso8601(week_end)}"
Kino.render(Kino.Markdown.new("**Query:** `#{query}`"))
# Issue fields requested from the YouTrack issues API.
issue_fields = [
"idReadable",
"id",
"summary",
"description",
"created",
"updated",
"resolved",
"project(shortName)",
"tags(name)",
"comments(id,text,created,author(name,login))",
"customFields(name,value(name,login))"
]
req = Client.new!(base_url, token)
raw_issues = Client.fetch_issues!(req, query, fields: issue_fields)
# Activity fields requested later, per issue, from the activities API.
activity_fields = [
"id",
"timestamp",
"category(id)",
"author(name,login)",
"field(name)",
"targetMember",
"added",
"removed",
"markup"
]
# Optional client-side filter on the human-readable issue ID prefix
# (e.g. "PROJ-"); empty prefix keeps everything.
issues =
if project_prefix == "" do
raw_issues
else
Enum.filter(raw_issues, fn issue ->
String.starts_with?(issue["idReadable"] || "", project_prefix)
end)
end
Kino.Markdown.new("Fetched **#{length(issues)}** issues in selected week window.")
Enrich Issues (Activities + Workstreams)
# ---------------------------------------------------------------------------
# Enrichment: fetch the activity stream for each issue with bounded
# concurrency, and resolve each issue's workstreams from the loaded rules.
# ---------------------------------------------------------------------------
# activity_map: internal issue id -> list of activity items.
activity_map =
issues
|> Task.async_stream(
fn issue ->
acts =
Client.fetch_activities!(req, issue["id"],
categories: activities_categories,
fields: activity_fields
)
{issue["id"], acts}
end,
max_concurrency: 8,
timeout: 60_000
)
# Best effort: failed or timed-out fetches (non-:ok results) are silently
# dropped; such issues later default to an empty activity list.
|> Enum.reduce(%{}, fn
{:ok, {id, acts}}, acc -> Map.put(acc, id, acts)
_, acc -> acc
end)
# issue_workstreams: internal issue id -> sorted list of workstream names.
issue_workstreams =
issues
|> Enum.map(fn issue ->
streams =
Workstreams.streams_for_issue(issue, workstream_rules,
include_substreams: include_substreams?
)
|> Enum.sort()
{issue["id"], streams}
end)
|> Map.new()
Kino.Markdown.new("Fetched activities for **#{map_size(activity_map)}** issues.")
Build Weekly + Daily Summaries
# Convert an inclusive Date into UTC epoch milliseconds at start of day.
to_start_ms = fn date ->
  date
  |> DateTime.new!(~T[00:00:00], "Etc/UTC")
  |> DateTime.to_unix(:millisecond)
end

# End of day = last millisecond before the following day starts.
to_end_ms = fn date -> to_start_ms.(Date.add(date, 1)) - 1 end
# Window boundaries in epoch milliseconds (inclusive at both ends).
weekly_start_ms = to_start_ms.(week_start)
weekly_end_ms = to_end_ms.(week_end)
daily_start_ms = to_start_ms.(last_working_day)
daily_end_ms = to_end_ms.(last_working_day)
# Builds a WeeklyReport summary for one issue within the given window,
# passing its activities, workstreams, and the configured state/tag
# vocabularies. Issues without fetched activities get an empty list.
build_summary = fn issue, window_start_ms, window_end_ms ->
WeeklyReport.build_issue_summary(
issue,
Map.get(activity_map, issue["id"], []),
state_field: state_field,
assignees_field: assignees_field,
in_progress_names: in_progress_names,
inactive_names: inactive_names,
done_names: done_names,
hold_tags: hold_tags,
special_tags: special_tags,
workstreams: Map.get(issue_workstreams, issue["id"], []),
window_start_ms: window_start_ms,
window_end_ms: window_end_ms
)
end
# An issue is "touched" in a window when one of its created/updated/resolved
# timestamps falls inside the window, or when the summary recorded any
# in-window detail (description change, state change, hold-tag event,
# or comment).
touched_in_window? = fn issue, summary, window_start_ms, window_end_ms ->
  in_window? = fn ts ->
    is_integer(ts) and ts >= window_start_ms and ts <= window_end_ms
  end

  timestamp_hit? =
    [issue["created"], issue["updated"], issue["resolved"]]
    |> Enum.any?(in_window?)

  detail_hit? =
    [
      summary.description_changes_in_window,
      summary.state_changes_in_window,
      summary.hold_tag_changes_in_window,
      summary.comments_in_window
    ]
    |> Enum.any?(&(&1 != []))

  timestamp_hit? or detail_hit?
end
# Build the filtered, sorted summary lists for both windows.
#
# FIX(review): the original re-located each summary's source issue with
# Enum.find(issues, &(&1["idReadable"] == summary.id)) — an O(n^2) scan that
# also silently passes nil to touched_in_window? whenever summary.id does not
# match idReadable. Pairing each issue with its own summary during the map
# removes both problems, and one helper covers both windows.
summarize_window = fn window_start_ms, window_end_ms ->
  issues
  |> Enum.map(fn issue -> {issue, build_summary.(issue, window_start_ms, window_end_ms)} end)
  |> Enum.filter(fn {issue, summary} ->
    touched_in_window?.(issue, summary, window_start_ms, window_end_ms)
  end)
  |> Enum.map(fn {_issue, summary} -> summary end)
  # Group by status, newest activity first, stable tiebreak on id.
  |> Enum.sort_by(&{&1.status, -(&1.updated || 0), &1.id})
end

weekly_summaries = summarize_window.(weekly_start_ms, weekly_end_ms)
daily_summaries = summarize_window.(daily_start_ms, daily_end_ms)

Kino.Markdown.new(
  "Built **#{length(daily_summaries)}** daily and **#{length(weekly_summaries)}** weekly issue summaries."
)
Issue Signals Table
# Renders a heading plus a data table of per-issue signals (durations,
# description/comment/state-change counts) for one list of summaries.
signals_table = fn summaries, label ->
rows =
Enum.map(summaries, fn s ->
%{
id: s.id,
title: s.title,
status: s.status,
assignees: Enum.join(s.assignees, ", "),
cycle_time: WeeklyReport.format_duration(s.cycle_time_ms),
net_active_time: WeeklyReport.format_duration(s.net_active_time_ms),
desc_changed: if(s.description_updated_in_window, do: "✓", else: ""),
comments: length(s.comments_in_window),
state_changes: length(s.state_changes_in_window)
}
end)
Kino.Layout.grid([
Kino.Markdown.new("### #{label}"),
Kino.DataTable.new(
rows,
keys: [:id, :title, :status, :assignees, :cycle_time, :net_active_time, :desc_changed, :comments, :state_changes],
name: label
)
])
end
# Weekly and daily signal tables, one tab each.
Kino.Layout.tabs([
{"Weekly (#{length(weekly_summaries)})", signals_table.(weekly_summaries, "Weekly Issues")},
{"Daily (#{length(daily_summaries)})", signals_table.(daily_summaries, "Daily Issues")}
])
Build LLM-Ready Report Payload
# Epoch milliseconds -> ISO-8601 UTC string (second precision);
# nil passes through unchanged.
format_ts = fn
  nil -> nil
  ms -> ms |> div(1000) |> DateTime.from_unix!() |> DateTime.to_iso8601()
end
# Aggregates window-level metrics over a list of issue summaries:
# completion, blocked/on-hold and to-be-specified counts, signal presence
# counts, and integer-average cycle / net-active times (nil when no values).
summary_metrics = fn summaries, window_start_ms, window_end_ms ->
  resolved_in_window? = fn s ->
    s.status == "finished" and is_integer(s.resolved) and
      s.resolved >= window_start_ms and s.resolved <= window_end_ms
  end

  # Case-insensitive membership test over a summary's special tags.
  has_tag? = fn s, wanted ->
    Enum.any?(s.special_tags, &(String.downcase(&1) in wanted))
  end

  average = fn
    [] -> nil
    values -> div(Enum.sum(values), length(values))
  end

  cycle_values = for %{cycle_time_ms: v} <- summaries, is_integer(v), do: v
  net_values = for %{net_active_time_ms: v} <- summaries, is_integer(v), do: v

  %{
    issues_touched: length(summaries),
    completed_in_window: Enum.count(summaries, resolved_in_window?),
    blocked_or_on_hold:
      Enum.count(summaries, fn s -> s.is_on_hold or has_tag?.(s, ["blocked", "on hold"]) end),
    to_be_specified: Enum.count(summaries, &has_tag?.(&1, ["to be specified"])),
    issues_with_checklists:
      Enum.count(summaries, &(&1.checklist.checked + &1.checklist.unchecked > 0)),
    issues_with_comments: Enum.count(summaries, &(&1.comments_in_window != [])),
    issues_with_description_changes:
      Enum.count(summaries, &(&1.description_changes_in_window != [])),
    avg_cycle_time_ms: average.(cycle_values),
    avg_net_active_time_ms: average.(net_values)
  }
end
# Converts one issue summary into the LLM-facing brief: human-readable
# timestamps, compact description diffs, active/inactive intervals, and a
# flat list of "signal" strings summarizing what happened in the window.
issue_brief = fn s ->
# Positive delta = more items checked than unchecked in the description.
checklist_delta = s.checklist.checked - s.checklist.unchecked
description_changes =
Enum.map(s.description_changes_in_window, fn change ->
%{
at: format_ts.(change.timestamp),
author: change.author,
type: change.change_type,
before_excerpt: change.previous_excerpt,
after_excerpt: change.new_excerpt,
before_changed: change.previous_changed_text,
after_changed: change.new_changed_text
}
end)
active_time_intervals =
Enum.map(s.active_time_intervals, fn interval ->
%{
from: format_ts.(interval.start_ms),
to: format_ts.(interval.end_ms),
duration_ms: interval.duration_ms,
duration: WeeklyReport.format_duration(interval.duration_ms)
}
end)
inactive_interruptions =
Enum.map(s.inactive_interruption_intervals, fn interval ->
%{
from: format_ts.(interval.start_ms),
to: format_ts.(interval.end_ms),
duration_ms: interval.duration_ms,
duration: WeeklyReport.format_duration(interval.duration_ms)
}
end)
# Signals: "name:count" strings for each kind of in-window activity;
# nil entries (absent signals) are filtered out below.
signals =
[
if(length(s.state_changes_in_window) > 0,
do: "state_changed:#{length(s.state_changes_in_window)}",
else: nil
),
if(length(s.comments_in_window) > 0,
do: "comments_added:#{length(s.comments_in_window)}",
else: nil
),
if(s.checklist.checked + s.checklist.unchecked > 0,
do: "checklist_present:#{s.checklist.checked}/#{s.checklist.unchecked}",
else: nil
),
if(length(s.hold_tag_changes_in_window) > 0,
do: "hold_tag_events:#{length(s.hold_tag_changes_in_window)}",
else: nil
),
if(description_changes != [], do: "description_changed:#{length(description_changes)}", else: nil),
if(s.special_tags != [], do: "special_tags:#{Enum.join(s.special_tags, "|")}", else: nil)
]
|> Enum.filter(& &1)
%{
id: s.id,
title: s.title,
status: s.status,
state: s.state,
assignees: s.assignees,
workstreams: s.workstreams,
special_tags: s.special_tags,
checklist: %{checked: s.checklist.checked, unchecked: s.checklist.unchecked, delta: checklist_delta},
cycle_time: WeeklyReport.format_duration(s.cycle_time_ms),
net_active_time: WeeklyReport.format_duration(s.net_active_time_ms),
active_time_intervals: active_time_intervals,
inactive_interruptions: inactive_interruptions,
state_changes: Enum.map(s.state_changes_in_window, &%{at: format_ts.(&1.timestamp), from: &1.from, to: &1.to}),
description_changes: description_changes,
comments:
Enum.map(s.comments_in_window, fn c ->
%{at: format_ts.(c.timestamp), author: c.author, text: c.text}
end),
signals: signals,
updated_at: format_ts.(s.updated),
resolved_at: format_ts.(s.resolved)
}
end
# ---------------------------------------------------------------------------
# Assemble the LLM-ready payloads: one per window plus a combined report
# with the generation timestamp, query, and configuration echoed back.
# ---------------------------------------------------------------------------
weekly_payload = %{
window: %{from: Date.to_iso8601(week_start), to: Date.to_iso8601(week_end)},
metrics: summary_metrics.(weekly_summaries, weekly_start_ms, weekly_end_ms),
issues: Enum.map(weekly_summaries, issue_brief)
}
daily_payload = %{
window: %{date: Date.to_iso8601(last_working_day)},
metrics: summary_metrics.(daily_summaries, daily_start_ms, daily_end_ms),
issues: Enum.map(daily_summaries, issue_brief)
}
report_payload = %{
generated_at: DateTime.utc_now() |> DateTime.to_iso8601(),
query: query,
configuration: %{
state_field: state_field,
assignees_field: assignees_field,
in_progress_names: in_progress_names,
inactive_names: inactive_names,
done_names: done_names,
special_tags: special_tags,
hold_tags: hold_tags,
include_substreams: include_substreams?
},
daily_report: daily_payload,
weekly_report: weekly_payload
}
# Pretty-printed JSON variants used by the preview/copy/download widgets.
report_json = Jason.encode!(report_payload, pretty: true)
daily_json = Jason.encode!(daily_payload, pretty: true)
weekly_json = Jason.encode!(weekly_payload, pretty: true)
# Preview controls: size cap, which window to preview, and whether to
# render the (potentially huge) full JSON inline.
preview_limit_in =
Kino.Input.number("JSON preview max chars", default: 4_000)
window_for_payload_in =
Kino.Input.select("Payload window for quick preview/copy",
[daily: "Daily", weekly: "Weekly"],
default: :daily
)
show_full_payload_in =
Kino.Input.checkbox("Show full report JSON inline (can be very large)", default: false)
Kino.render(Kino.Layout.grid([preview_limit_in, window_for_payload_in, show_full_payload_in], columns: 2))
# Enforce a sane minimum preview size of 500 characters.
preview_limit = max(Kino.Input.read(preview_limit_in) || 4_000, 500)
selected_payload_window = Kino.Input.read(window_for_payload_in)
show_full_payload? = Kino.Input.read(show_full_payload_in)
selected_payload_json =
case selected_payload_window do
:weekly -> weekly_json
_ -> daily_json
end
# Caps long text for inline display; appends a truncation notice when cut.
truncate_text = fn text, limit ->
  case String.length(text) do
    n when n <= limit ->
      text

    _ ->
      String.slice(text, 0, limit) <>
        "\n... (truncated, use copy/download for full content)"
  end
end
# One-click copy / download widget for a large text payload.
#
# FIX(review): the markup in this cell was corrupted — the HTML wrapper and
# <script> tags were missing, and two interpolations had degraded to
# "#(unknown)" (the download link label and the download filename).
# The structure below is reconstructed from the selectors the script queries
# (data-copy / data-download / data-status inside the element with the given
# id) — confirm the layout matches the original notebook's intent.
#
# The text is embedded base64-encoded so it survives HTML escaping and is
# decoded client-side with atob. Copy tries the async Clipboard API first,
# falling back to a hidden textarea + execCommand; download uses a Blob URL.
copy_download_widget = fn id, label, text, filename ->
  encoded = Base.encode64(text)

  Kino.HTML.new("""
  <div id="#{id}">
    <button type="button" data-copy="#{id}">Copy #{label}</button>
    <a data-download="#{id}" href="#">Download #{label}</a>
    <span data-status="#{id}"></span>
  </div>
  <script>
  (() => {
    const encoded = "#{encoded}";
    const text = atob(encoded);
    const root = document.getElementById("#{id}");
    if (!root) return;
    const copyBtn = root.querySelector('[data-copy="#{id}"]');
    const downloadLink = root.querySelector('[data-download="#{id}"]');
    const status = root.querySelector('[data-status="#{id}"]');
    const setStatus = (message) => {
      if (status) status.textContent = message;
    };
    if (copyBtn) {
      copyBtn.addEventListener('click', async () => {
        try {
          if (navigator.clipboard && navigator.clipboard.writeText) {
            await navigator.clipboard.writeText(text);
            setStatus('Copied.');
          } else {
            throw new Error('Clipboard API unavailable');
          }
        } catch (_) {
          const area = document.createElement('textarea');
          area.value = text;
          document.body.appendChild(area);
          area.select();
          try {
            document.execCommand('copy');
            setStatus('Copied (fallback).');
          } catch (_err) {
            setStatus('Copy blocked. Use download link.');
          }
          document.body.removeChild(area);
        }
      });
    }
    if (downloadLink) {
      const blob = new Blob([text], { type: 'text/plain;charset=utf-8' });
      const url = URL.createObjectURL(blob);
      downloadLink.href = url;
      downloadLink.download = "#{filename}";
    }
  })();
  </script>
  """)
end
# Per-window headline numbers shown in the Summary tab.
summary_rows =
[
%{window: "Daily", issues: daily_payload.metrics.issues_touched, completed: daily_payload.metrics.completed_in_window},
%{window: "Weekly", issues: weekly_payload.metrics.issues_touched, completed: weekly_payload.metrics.completed_in_window}
]
# Tab set: summary table, truncated JSON preview, expandable payload trees,
# and copy/download helpers for both the full report and the selected window.
payload_tabs = [
{"Summary", Kino.DataTable.new(summary_rows, keys: [:window, :issues, :completed], name: "Report Summary")},
{"Selected JSON Preview", Kino.Markdown.new("""
Window: **#{selected_payload_window}**
#{truncate_text.(selected_payload_json, preview_limit)}
""")},
{"Payload Tree", Kino.Layout.tabs([
{"Weekly payload", Kino.Tree.new(weekly_payload)},
{"Daily payload", Kino.Tree.new(daily_payload)}
])},
{"Copy / Download", Kino.Layout.grid([
Kino.Markdown.new("Use copy for one-click clipboard and download as a browser-safe fallback."),
copy_download_widget.("copy-full-report-json", "full report JSON", report_json, "weekly-delivery-report.json"),
copy_download_widget.("copy-selected-window-json", "selected window JSON", selected_payload_json, "weekly-delivery-#{selected_payload_window}.json"),
Kino.Download.new(fn -> report_json end, filename: "weekly-delivery-report.json", label: "Download full report JSON")
], columns: 1)}
]
# Append the (potentially huge) full-JSON tab only when requested, then
# render the tab set once.
payload_tabs =
  if show_full_payload? do
    full_json_tab =
      {"Full Report JSON",
       Kino.Markdown.new("""
       ```json
       #{report_json}
       ```
       """)}

    payload_tabs ++ [full_json_tab]
  else
    payload_tabs
  end

Kino.Layout.tabs(payload_tabs)
# Current directory is echoed in validation messages further below.
cwd = File.cwd!()

# Marker that prompt templates should contain where the JSON belongs.
payload_placeholder = "{{REPORT_PAYLOAD_JSON}}"

# Substitute the placeholder when present; otherwise append the JSON as a
# trailing "JSON payload:" section.
build_prompt_text = fn prompt_template, payload_json ->
  if String.contains?(prompt_template, payload_placeholder),
    do: String.replace(prompt_template, payload_placeholder, payload_json),
    else: prompt_template <> "\n\nJSON payload:\n" <> payload_json
end
# Collect prompt files from ./prompts and /data/prompts (when those dirs
# exist), skipping .gitkeep. Entries from either location are normalized to
# the relative "prompts/<name>" form; dedupe + sort makes the list stable.
prompt_dir_files =
  ["prompts", "/data/prompts"]
  |> Enum.filter(&File.dir?/1)
  |> Enum.flat_map(fn dir ->
    for entry <- File.ls!(dir), entry != ".gitkeep", do: Path.join("prompts", entry)
  end)
  |> Enum.uniq()
  |> Enum.sort()
# Candidate prompt sources, in priority order: the legacy root .prompt, any
# files found under prompts/, and .prompt.example as a last file fallback.
# Each file source lists two candidate paths — the working directory and a
# /data mirror (presumably a mounted volume in containerized runs — TODO
# confirm); the first path that exists wins at read time.
file_sources =
[
%{
id: "root:.prompt",
label: ".prompt (root, backward compatible)",
paths: [".prompt", "/data/.prompt"],
kind: :file
}
] ++
Enum.map(prompt_dir_files, fn relative_path ->
%{
id: "file:" <> relative_path,
label: relative_path,
paths: [relative_path, Path.join("/data", relative_path)],
kind: :file
}
end) ++
[
%{
id: "root:.prompt.example",
label: ".prompt.example (fallback example)",
paths: [".prompt.example", "/data/.prompt.example"],
kind: :file
}
]
# Keep only sources for which at least one candidate path is a regular file.
file_sources =
Enum.filter(file_sources, fn source ->
Enum.any?(source.paths, &File.regular?/1)
end)
# Non-file sources: a built-in reminder template and a manual text area.
built_in_source = %{
id: "built-in",
label: "Built-in reminder",
kind: :built_in,
template:
"Create .prompt or add a file under prompts/ and keep {{REPORT_PAYLOAD_JSON}} where the report JSON should be injected."
}
manual_source = %{
id: "manual",
label: "Manual prompt",
kind: :manual
}
selectable_sources = file_sources ++ [built_in_source, manual_source]
# Preferred default: .prompt, then .prompt.example, then any file source,
# finally the built-in reminder.
default_source_id =
cond do
Enum.any?(file_sources, &(&1.id == "root:.prompt")) -> "root:.prompt"
Enum.any?(file_sources, &(&1.id == "root:.prompt.example")) -> "root:.prompt.example"
file_sources != [] -> hd(file_sources).id
true -> built_in_source.id
end
# ---------------------------------------------------------------------------
# Prompt template selection UI, template resolution, placeholder validation,
# and the final-prompt preview tabs.
# ---------------------------------------------------------------------------
Kino.render(
Kino.Markdown.new("""
### Prompt Template Selection
Choose a prompt file from `prompts/`, keep using the backward-compatible `.prompt`, or switch to **Manual prompt**.
Reminder: keep `#{payload_placeholder}` in the template where the report JSON should be inserted. If it is missing, the notebook appends the JSON payload automatically.
""")
)
prompt_source_in =
Kino.Input.select(
"Prompt source",
Enum.map(selectable_sources, &{&1.id, &1.label}),
default: default_source_id
)
manual_prompt_in =
Kino.Input.textarea(
"Manual prompt (used only when 'Manual prompt' is selected)",
default: ""
)
Kino.render(Kino.Layout.grid([prompt_source_in, manual_prompt_in], columns: 1))
selected_source_id = Kino.Input.read(prompt_source_in)
manual_prompt = Kino.Input.read(manual_prompt_in) |> String.trim()
# Fall back to the built-in source if the selected id is unknown.
selected_source =
Enum.find(selectable_sources, &(&1.id == selected_source_id)) || built_in_source
# Resolve the template text and a human-readable source description.
{prompt_template, prompt_source} =
case selected_source.kind do
:file ->
resolved_path = Enum.find(selected_source.paths, &File.regular?/1)
{File.read!(resolved_path), resolved_path}
:manual ->
template =
if manual_prompt == "" do
"Write your manual prompt here and keep {{REPORT_PAYLOAD_JSON}} where the report JSON should be inserted."
else
manual_prompt
end
{template, "manual prompt"}
:built_in ->
{selected_source.template, selected_source.label}
end
# Tell the user whether the placeholder was found, or that the JSON will be
# appended at the end of the prompt instead.
has_payload_placeholder? = String.contains?(prompt_template, payload_placeholder)
if has_payload_placeholder? do
Kino.render(
Kino.Markdown.new(
"Prompt template validation: placeholder `#{payload_placeholder}` found in #{prompt_source}. CWD: #{cwd}."
)
)
else
Kino.render(
Kino.Markdown.new(
"Prompt template validation: placeholder `#{payload_placeholder}` not found in #{prompt_source}. JSON payload will be appended at the end. CWD: #{cwd}."
)
)
end
# Final copy-paste prompt: template + the full combined report JSON.
llm_prompt =
build_prompt_text.(prompt_template, report_json)
# Preview controls for the final prompt.
prompt_preview_limit_in =
Kino.Input.number("Prompt preview max chars", default: 4_000)
show_full_prompt_in =
Kino.Input.checkbox("Show full prompt inline (can be very large)", default: false)
Kino.render(Kino.Layout.grid([prompt_preview_limit_in, show_full_prompt_in], columns: 2))
prompt_preview_limit = max(Kino.Input.read(prompt_preview_limit_in) || 4_000, 500)
show_full_prompt? = Kino.Input.read(show_full_prompt_in)
prompt_preview = truncate_text.(llm_prompt, prompt_preview_limit)
prompt_tabs = [
{"Prompt Preview", Kino.Markdown.new("""
### Prompt To Copy Into ChatGPT (or similar)
Template source: #{prompt_source}
#{prompt_preview}
""")},
{"Copy / Download", Kino.Layout.grid([
copy_download_widget.("copy-final-prompt", "final prompt", llm_prompt, "weekly-delivery-prompt.txt"),
Kino.Download.new(fn -> llm_prompt end, filename: "weekly-delivery-prompt.txt", label: "Download final prompt")
], columns: 1)}
]
# Append the (potentially huge) full-prompt tab only when requested, then
# render the tab set once.
prompt_tabs =
  if show_full_prompt? do
    full_prompt_tab =
      {"Full Prompt",
       Kino.Markdown.new("""
       ```text
       #{llm_prompt}
       ```
       """)}

    prompt_tabs ++ [full_prompt_tab]
  else
    prompt_tabs
  end

Kino.Layout.tabs(prompt_tabs)
# ---------------------------------------------------------------------------
# LLM analysis: define all configuration inputs FIRST, then read them and
# send the selected report window to an OpenAI-compatible chat endpoint.
#
# FIX(review): in the original, the request code referenced llm_url_in,
# llm_model_in, llm_window_in and llm_timeout_in BEFORE those inputs were
# defined further down the notebook — running top-to-bottom fails with
# undefined variables. The definitions now precede the request.
# ---------------------------------------------------------------------------

llm_url_in =
  Kino.Input.text("Ollama / OpenAI-compat base URL",
    default: System.get_env("LLM_BASE_URL") || "http://localhost:11434"
  )

llm_window_in =
  Kino.Input.select("Report window to analyse",
    [daily: "Daily", weekly: "Weekly"],
    default: :daily
  )

llm_timeout_in =
  Kino.Input.number("Generation timeout (seconds)", default: 300)

Kino.render(Kino.Layout.grid([llm_url_in, llm_window_in, llm_timeout_in], columns: 2))

# Discover available models from the endpoint; fall back to the LLM_MODEL
# env var (or a hard-coded default) when the listing fails.
llm_base_url_for_models = Kino.Input.read(llm_url_in) |> String.trim_trailing("/")
fallback_model = System.get_env("LLM_MODEL") || "qwen2.5:7b"

available_models =
  case Req.get("#{llm_base_url_for_models}/v1/models", receive_timeout: 10_000) do
    {:ok, %{status: 200, body: %{"data" => models}}} when is_list(models) ->
      models
      |> Enum.map(fn model -> Map.get(model, "id") end)
      |> Enum.filter(&is_binary/1)
      |> Enum.uniq()
      |> Enum.sort()

    _ ->
      []
  end

{model_options, default_model, model_source_message} =
  if available_models == [] do
    {
      [{fallback_model, "#{fallback_model} (fallback default)"}],
      fallback_model,
      "Could not load models from `#{llm_base_url_for_models}/v1/models`. Using fallback model input."
    }
  else
    # Prefer the env-configured model when the endpoint offers it.
    selected_default_model =
      if fallback_model in available_models, do: fallback_model, else: hd(available_models)

    {
      Enum.map(available_models, &{&1, &1}),
      selected_default_model,
      "Loaded #{length(available_models)} model(s) from `#{llm_base_url_for_models}/v1/models`."
    }
  end

Kino.render(Kino.Markdown.new(model_source_message))

llm_model_in = Kino.Input.select("Model", model_options, default: default_model)
Kino.render(llm_model_in)

# --- Read configuration and perform the chat-completions request ---

llm_base_url = Kino.Input.read(llm_url_in) |> String.trim_trailing("/")
llm_model = Kino.Input.read(llm_model_in) |> String.trim()
llm_window = Kino.Input.read(llm_window_in)
llm_timeout_ms = (Kino.Input.read(llm_timeout_in) || 300) * 1_000

selected_payload =
  case llm_window do
    :daily -> daily_payload
    :weekly -> weekly_payload
  end

selected_json = Jason.encode!(selected_payload, pretty: true)

# Inject only the selected window's JSON — a smaller prompt than the
# combined report used for the copy/paste flow above.
llm_prompt_text = build_prompt_text.(prompt_template, selected_json)

Kino.render(
  Kino.Markdown.new(
    "Sending **#{llm_window}** report to `#{llm_model}` at `#{llm_base_url}` …"
  )
)

response =
  Req.post(
    "#{llm_base_url}/v1/chat/completions",
    json: %{
      model: llm_model,
      messages: [%{role: "user", content: llm_prompt_text}],
      stream: false
    },
    receive_timeout: llm_timeout_ms
  )

# Render the model's answer, or a readable failure notice.
case response do
  {:ok, %{status: 200, body: body}} ->
    content =
      case body do
        %{"choices" => [%{"message" => %{"content" => text}} | _]} -> text
        other -> "Unexpected response shape:\n\n```\n#{inspect(other, pretty: true)}\n```"
      end

    Kino.Markdown.new(content)

  {:ok, %{status: status, body: body}} ->
    Kino.Markdown.new("""
    > **LLM request failed** — HTTP #{status}
    >
    > ```
    > #{inspect(body, pretty: true) |> String.slice(0, 1_000)}
    > ```
    """)

  {:error, reason} ->
    Kino.Markdown.new("""
    > **LLM request error** — could not reach `#{llm_base_url}`
    >
    > ```
    > #{inspect(reason)}
    > ```
    >
    > Check that Ollama is running and the URL is correct.
    """)
end