
Using PromptBuddy

nbs/usage.livemd


Mix.install([
  {:kino_promptbuddy, path: Path.join(__DIR__, "..")},
  {:kino, "~> 0.17.0"},
  {:mix_install_watcher, "~> 0.1.0"}
])

The context is all precedent cells: every cell above the current one in the notebook is included in what the model sees.
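
As a rough sketch of that idea (not the actual Kino.PromptBuddy.Context implementation: the notebook shape, a map with a :cells list of %{id: _, source: _} maps, is an assumption here, while ReqLLM.Context.user/1 is taken from the generated code below):

# Hypothetical sketch of building precedent messages.
defmodule PrecedentSketch do
  def build(notebook, current_cell_id) do
    notebook.cells
    # Keep only the cells above the current smart cell.
    |> Enum.take_while(fn cell -> cell.id != current_cell_id end)
    # Turn each cell's source into one user message for the model.
    |> Enum.map(fn cell -> ReqLLM.Context.user(cell.source) end)
  end
end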

alias Kino.PromptBuddy.Context
# ReqLLM model spec for this chat cell.
model = "openrouter:anthropic/claude-haiku-4.5"
# Passed to stream_response_and_update_history/8 below.
n_every = 24
session_id = "356mdgtglekdibxpicxgpgd45ila3nti3vkpr4j6i6xo764r"
current_cell_id = Context.get_current_cell_id()
# The prompt typed into the smart cell; blank here, so no request is sent.
user_text = ""
# The smart cell process, messaged below to clear the prompt editor.
smart_cell_pid = Process.whereis(:"promptbuddy_#{"nofile"}")
import Kino.Shorts
outer = frame()
body = frame()
chat_history = Kino.PromptBuddy.get_history(current_cell_id)
prompt_blank? = String.trim(user_text) == ""
previous_msgs = Kino.PromptBuddy.history_markdown(chat_history)
current_prompt_header = Kino.Markdown.new("**You**:")
current_prompt_body = Kino.Markdown.new(user_text)
buddy_header = Kino.Markdown.new("**Buddy**:")

# Render the transcript: prior exchanges, then the current prompt and Buddy's reply frame.
Kino.Frame.render(
  outer,
  Kino.Layout.grid(
    previous_msgs ++ [current_prompt_header, current_prompt_body, buddy_header, body]
  )
)

if not prompt_blank? do
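  # After a short delay, ask the smart cell to clear its prompt editor.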
  Task.start(fn ->
    Process.sleep(100)

    if smart_cell_pid do
      send(smart_cell_pid, {:clear_editor, current_cell_id})
    end
  end)

  system_msg =
    ReqLLM.Context.system(
      "You are a patient pair-programming partner using **Polya's method** / **Socratic** style.\nPRIORITY: (1) Answer only the final PROMPT, (2) be brief, (3) one code fence if needed.\n"
    )

  prompt_msg =
    ReqLLM.Context.user("""
    --- BEGIN PROMPT ---
    #{user_text}
    --- END PROMPT ---
    """)

  # Pull every cell above this one into the model's context.
  precedent_msgs =
    case Context.get_notebook(session_id) do
      {:ok, nb} -> Context.build_precedent_messages(nb, current_cell_id)
      _ -> []
    end

  history_msgs = Kino.PromptBuddy.history_to_messages(chat_history)
  # Full request: system prompt, precedent cells, this cell's history, the new prompt.
  messages = [system_msg] ++ precedent_msgs ++ history_msgs ++ [prompt_msg]

  # Stream the reply into the body frame and record the exchange in history.
  Task.start(fn ->
    Kino.PromptBuddy.stream_response_and_update_history(
      model,
      messages,
      body,
      outer,
      user_text,
      chat_history,
      current_cell_id,
      n_every
    )
  end)
end

outer

My name is Fred

Now Buddy knows my name: the exchange above is itself a precedent cell, so it rides along in the next request's context.
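
Concretely, the cell below assembles its request as [system] ++ precedent ++ history ++ [prompt]. Here is a sketch of how a chat history might map onto messages; the {user, buddy} pair shape and ReqLLM.Context.assistant/1 are assumptions, while user/1 and system/1 appear in the generated code:

# Hypothetical history shape: one {user_text, buddy_text} tuple per exchange.
history = [{"My name is Fred", "Nice to meet you, Fred!"}]

history_msgs =
  Enum.flat_map(history, fn {user, buddy} ->
    # Assumes an assistant/1 constructor alongside user/1 and system/1.
    [ReqLLM.Context.user(user), ReqLLM.Context.assistant(buddy)]
  end)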

alias Kino.PromptBuddy.Context
# A second PromptBuddy cell, this one pointed at a different model.
model = "openrouter:anthropic/claude-sonnet-4.5"
n_every = 24
session_id = "356mdgtglekdibxpicxgpgd45ila3nti3vkpr4j6i6xo764r"
current_cell_id = Context.get_current_cell_id()
user_text = ""
smart_cell_pid = Process.whereis(:"promptbuddy_#{"nofile"}")
import Kino.Shorts
outer = frame()
body = frame()
chat_history = Kino.PromptBuddy.get_history(current_cell_id)
prompt_blank? = String.trim(user_text) == ""
previous_msgs = Kino.PromptBuddy.history_markdown(chat_history)
current_prompt_header = Kino.Markdown.new("**You**:")
current_prompt_body = Kino.Markdown.new(user_text)
buddy_header = Kino.Markdown.new("**Buddy**:")

Kino.Frame.render(
  outer,
  Kino.Layout.grid(
    previous_msgs ++ [current_prompt_header, current_prompt_body, buddy_header, body]
  )
)

if not prompt_blank? do
  Task.start(fn ->
    Process.sleep(100)

    if smart_cell_pid do
      send(smart_cell_pid, {:clear_editor, current_cell_id})
    end
  end)

  system_msg =
    ReqLLM.Context.system(
      "You are a patient pair-programming partner using **Polya's method** / **Socratic** style.\nPRIORITY: (1) Answer only the final PROMPT, (2) be brief, (3) one code fence if needed.\n"
    )

  prompt_msg =
    ReqLLM.Context.user("""
    --- BEGIN PROMPT ---
    #{user_text}
    --- END PROMPT ---
    """)

  precedent_msgs =
    case Context.get_notebook(session_id) do
      {:ok, nb} -> Context.build_precedent_messages(nb, current_cell_id)
      _ -> []
    end

  history_msgs = Kino.PromptBuddy.history_to_messages(chat_history)
  messages = [system_msg] ++ precedent_msgs ++ history_msgs ++ [prompt_msg]

  Task.start(fn ->
    Kino.PromptBuddy.stream_response_and_update_history(
      model,
      messages,
      body,
      outer,
      user_text,
      chat_history,
      current_cell_id,
      n_every
    )
  end)
end

outer