Deeplearning.AI Order Bot
Mix.install([
  {:openai_ex, "~> 0.8.4"},
  {:kino, "~> 0.13.2"}
])
alias OpenaiEx
alias OpenaiEx.Chat
alias OpenaiEx.ChatMessage
Source
This notebook is an Elixir translation of the Python notebook in Lesson 8 of Deeplearning.AI's course ChatGPT Prompt Engineering for Developers.
Chat Format
In this notebook, you will explore how to use the chat format to have extended conversations with chatbots personalized or specialized for specific tasks or behaviors.
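A conversation is passed to the model as a list of messages, each tagged with a role: the system message sets the assistant's overall behavior, while user and assistant messages carry the individual turns of the dialogue.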
Setup
openai =
  System.fetch_env!("LB_OPENAI_API_KEY")
  |> OpenaiEx.new()
# Uncomment the last line of this comment block when working with a local LLM behind an
# OpenAI-compatible proxy such as llama.cpp-python. In the example below, our development
# Livebook server runs in a Docker dev container while the local LLM runs on the host machine.
# |> OpenaiEx.with_base_url("http://host.docker.internal:8000/v1")
defmodule OpenaiEx.Notebooks.DlaiOrderbot do
  alias OpenaiEx
  alias OpenaiEx.Chat
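
  # Builds a Chat Completions request from a keyword list of options,
  # merging in a default model and temperature unless they are overridden.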
  def create_chat_req(args = [_ | _]) do
    args
    |> Enum.into(%{
      model: "gpt-4o-mini",
      temperature: 0
    })
    |> Chat.Completions.new()
  end
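
  # Sends the request and extracts the assistant's reply text from the
  # first choice of the response.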
  def get_completion(openai = %OpenaiEx{}, cc_req = %{}) do
    openai
    |> Chat.Completions.create!(cc_req)
    # for debugging
    # |> IO.inspect()
    |> Map.get("choices")
    |> List.first()
    |> Map.get("message")
    |> Map.get("content")
  end
end
alias OpenaiEx.Notebooks.DlaiOrderbot
messages = [
ChatMessage.system("You are an assistant that speaks like Shakespeare."),
ChatMessage.user("tell me a joke"),
ChatMessage.assistant("Why did the chicken cross the road"),
ChatMessage.user("I don't know")
]
req = DlaiOrderbot.create_chat_req(messages: messages, temperature: 1)
openai |> DlaiOrderbot.get_completion(req)
messages = [
ChatMessage.system("You are friendly chatbot."),
ChatMessage.user("Hi, my name is Isa")
]
req = DlaiOrderbot.create_chat_req(messages: messages, temperature: 1)
openai |> DlaiOrderbot.get_completion(req)
messages = [
ChatMessage.system("You are friendly chatbot."),
ChatMessage.user("Yes, can you remind me, What is my name?")
]
req = DlaiOrderbot.create_chat_req(messages: messages, temperature: 1)
openai |> DlaiOrderbot.get_completion(req)
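Each request to the model is stateless, so the chatbot only knows your name if the earlier turns of the conversation are included in the list of messages, as in the next example.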
messages = [
ChatMessage.system("You are friendly chatbot."),
ChatMessage.user("Hi, my name is Isa"),
ChatMessage.assistant(
"Hi Isa! It's nice to meet you. Is there anything I can help you with today?"
),
ChatMessage.user("Yes, can you remind me, What is my name?")
]
req = DlaiOrderbot.create_chat_req(messages: messages, temperature: 1)
openai |> DlaiOrderbot.get_completion(req)
Order Bot
We can automate the collection of user prompts and assistant responses to build an OrderBot that takes orders at a pizza restaurant.
context = [
ChatMessage.system("""
You are OrderBot, an automated service to collect orders for a pizza restaurant. \
You first greet the customer, then collect the order, \
and then ask if it's a pickup or delivery. \
You wait to collect the entire order, then summarize it and check for a final \
time if the customer wants to add anything else. \
If it's a delivery, you ask for an address. \
Finally you collect the payment. \
Make sure to clarify all options, extras and sizes to uniquely \
identify the item from the menu. \
You respond in a short, very conversational friendly style. \
The menu includes \
pepperoni pizza 12.95, 10.00, 7.00 \
cheese pizza 10.95, 9.25, 6.50 \
eggplant pizza 11.95, 9.75, 6.75 \
fries 4.50, 3.50 \
greek salad 7.25 \
Toppings: \
extra cheese 2.00, \
mushrooms 1.50 \
sausage 3.00 \
canadian bacon 3.50 \
AI sauce 1.50 \
peppers 1.00 \
Drinks: \
coke 3.00, 2.00, 1.00 \
sprite 3.00, 2.00, 1.00 \
bottled water 5.00 \
""")
]
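# Send the accumulated conversation to the model, append the bot's reply to the
# chat frame, and return the reply so it can be added to the history.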
append_completion = fn openai, messages, frame ->
  req = DlaiOrderbot.create_chat_req(messages: messages)
  bot_says = openai |> DlaiOrderbot.get_completion(req)
  Kino.Frame.append(frame, Kino.Text.new("#{bot_says}"))
  bot_says
end
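# Build the chat UI: a frame for the transcript and a form for user input.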
chat_frame = Kino.Frame.new()
inputs = [prompt: Kino.Input.textarea("You")]
form = Kino.Control.form(inputs, submit: "Send", reset_on_submit: [:prompt])
Kino.Frame.render(chat_frame, Kino.Markdown.new("### Orderbot Chat"))
Kino.Layout.grid([chat_frame, form], boxed: true, gap: 16) |> Kino.render()
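# Seed the conversation: the bot greets the customer before any user input.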
bot_says = openai |> append_completion.(context, chat_frame)
Kino.listen(
  form,
  context ++ [ChatMessage.assistant(bot_says)],
  fn %{data: %{prompt: you_say}}, history ->
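    # Render the user's message, ask the model for a reply, and carry the
    # updated history forward as the accumulator for the next event.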
    Kino.Frame.append(chat_frame, Kino.Markdown.new("**You** #{you_say}"))
    bot_says = openai |> append_completion.(history ++ [ChatMessage.user(you_say)], chat_frame)
    {:cont, history ++ [ChatMessage.user(you_say), ChatMessage.assistant(bot_says)]}
  end
)