
Commit

change return of LLMChain.run/2 - breaking change (#170)
* return list of exchanged messages after LLMChain.run
- not just the last message

* remove "last_message" from LLMChain.run return tuple
- updates to all usages and tests

* updated code examples in notebooks
brainlid authored Aug 21, 2024
1 parent c5e93ff commit 07bd3fb
Showing 16 changed files with 216 additions and 129 deletions.
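In short: `LLMChain.run/2` now returns `{:ok, chain}` instead of `{:ok, chain, last_message}`, and the final message is read off the returned chain. A minimal before/after sketch, adapted from the notebook updates in this commit (the model choice is illustrative):

```elixir
alias LangChain.Chains.LLMChain
alias LangChain.ChatModels.ChatOpenAI
alias LangChain.Message

# Before this commit: the last message was a third tuple element.
# {:ok, _updated_chain, response} = ... |> LLMChain.run()
# response.content

# After this commit: run/2 returns {:ok, chain}.
{:ok, updated_chain} =
  %{llm: ChatOpenAI.new!(%{model: "gpt-4o"})}
  |> LLMChain.new!()
  |> LLMChain.add_message(Message.new_user!("Testing, testing!"))
  |> LLMChain.run()

# Read the final assistant message from the chain.
updated_chain.last_message.content
```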
23 changes: 13 additions & 10 deletions lib/chains/data_extraction_chain.ex
@@ -74,6 +74,7 @@ defmodule LangChain.Chains.DataExtractionChain do
alias LangChain.Message
alias LangChain.Message.ToolCall
alias LangChain.Chains.LLMChain
+ alias LangChain.ChatModels.ChatOpenAI

@function_name "information_extraction"
@extraction_template ~s"Extract and save the relevant entities mentioned in the following passage together with their properties. Use the value `null` when missing in the passage.
@@ -93,7 +94,7 @@ Passage:
messages =
[
Message.new_system!(
"You are a helpful assistant that extracts structured data from text passages. Only use the functions you have been provided with."
"You are a helpful assistant that extracts structured data from text passages. Only use the functions you have been provided with. Extract the data in a single tool use."
),
PromptTemplate.new!(%{role: :user, text: @extraction_template})
]
@@ -106,15 +107,17 @@
|> LLMChain.add_messages(messages)
|> LLMChain.run()
|> case do
- {:ok, _updated_chain,
-  %Message{
-    role: :assistant,
-    tool_calls: [
-      %ToolCall{
-        name: @function_name,
-        arguments: %{"info" => info}
-      }
-    ]
+ {:ok,
+  %LLMChain{
+    last_message: %Message{
+      role: :assistant,
+      tool_calls: [
+        %ToolCall{
+          name: @function_name,
+          arguments: %{"info" => info}
+        }
+      ]
+    }
}}
when is_list(info) ->
{:ok, info}
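For reference, the public contract of `DataExtractionChain.run/4` is unchanged by this commit; only the internal match on `LLMChain.run/2`'s new return shape differs. A hedged usage sketch (the schema and passage are illustrative, loosely borrowed from the test file below):

```elixir
alias LangChain.Chains.DataExtractionChain
alias LangChain.ChatModels.ChatOpenAI

chat = ChatOpenAI.new!(%{model: "gpt-4o", stream: false})

# JSON-schema style parameters describing what to extract (illustrative).
schema_parameters = %{
  type: "object",
  properties: %{
    person_name: %{type: "string"},
    person_height: %{type: "number"},
    dog_name: %{type: "string"}
  }
}

data_prompt = "Alex is 5 feet tall. Alex's dog Frosty is a labrador."

# Still returns {:ok, info} where info is a list of extracted maps.
{:ok, info} = DataExtractionChain.run(chat, schema_parameters, data_prompt)
```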
47 changes: 33 additions & 14 deletions lib/chains/llm_chain.ex
@@ -2,11 +2,11 @@ defmodule LangChain.Chains.LLMChain do
@doc """
Define an LLMChain. This is the heart of the LangChain library.
- The chain deals with tools, a tool map, delta tracking, last_message tracking,
- conversation messages, and verbose logging. This helps by separating these
- responsibilities from the LLM making it easier to support additional LLMs
- because the focus is on communication and formats instead of all the extra
- logic.
+ The chain deals with tools, a tool map, delta tracking, tracking the messages
+ exchanged during a run, the last_message tracking, conversation messages, and
+ verbose logging. This helps by separating these responsibilities from the LLM
+ making it easier to support additional LLMs because the focus is on
+ communication and formats instead of all the extra logic.
## Callbacks
@@ -92,6 +92,9 @@ defmodule LangChain.Chains.LLMChain do
field :delta, :any, virtual: true
# Track the last `%Message{}` received in the chain.
field :last_message, :any, virtual: true
+ # Internally managed. The list of exchanged messages during a `run` function
+ # execution.
+ field :exchanged_messages, {:array, :any}, default: [], virtual: true
# Track if the state of the chain expects a response from the LLM. This
# happens after sending a user message, when a tool_call is received, or
# when we've provided a tool response and the LLM needs to respond.
@@ -233,11 +236,12 @@ defmodule LangChain.Chains.LLMChain do
an opportunity to use the `ToolResult` information in an assistant response
message. In essence, this mode always gives the LLM the last word.
"""
- @spec run(t(), Keyword.t()) ::
-         {:ok, t(), Message.t() | [Message.t()]} | {:error, t(), String.t()}
+ @spec run(t(), Keyword.t()) :: {:ok, t()} | {:error, t(), String.t()}
def run(chain, opts \\ [])

def run(%LLMChain{} = chain, opts) do
+ raise_on_obsolete_run_opts(opts)
+
# set the callback function on the chain
if chain.verbose, do: IO.inspect(chain.llm, label: "LLM")

@@ -246,12 +250,15 @@
tools = chain.tools
if chain.verbose, do: IO.inspect(tools, label: "TOOLS")

+ # clear the set of exchanged messages.
+ chain = clear_exchanged_messages(chain)
+
case Keyword.get(opts, :mode, nil) do
nil ->
# run the chain and format the return
case do_run(chain) do
{:ok, chain} ->
-         {:ok, chain, chain.last_message}
+         {:ok, chain}

{:error, _chain, _reason} = error ->
error
@@ -268,21 +275,21 @@
# Repeatedly run the chain until we get a successful ToolResponse or processed
# assistant message. Once we've reached success, it is not submitted back to the LLM,
# the process ends there.
- @spec run_until_success(t()) :: {:ok, t(), Message.t()} | {:error, t(), String.t()}
+ @spec run_until_success(t()) :: {:ok, t()} | {:error, t(), String.t()}
defp run_until_success(%LLMChain{last_message: %Message{} = last_message} = chain) do
stop_or_recurse =
cond do
chain.current_failure_count >= chain.max_retry_count ->
{:error, chain, "Exceeded max failure count"}

last_message.role == :tool && !Message.tool_had_errors?(last_message) ->
-   # a successful tool result is success
-   {:ok, chain, last_message}
+   # a successful tool result has no errors
+   {:ok, chain}

last_message.role == :assistant ->
# it was successful if we didn't generate a user message in response to
# an error.
-   {:ok, chain, last_message}
+   {:ok, chain}

true ->
:recurse
@@ -311,9 +318,9 @@ defmodule LangChain.Chains.LLMChain do
# Repeatedly run the chain while `needs_response` is true. This will execute
# tools and re-submit the tool result to the LLM giving the LLM an
# opportunity to execute more tools or return a response.
- @spec run_while_needs_response(t()) :: {:ok, t(), Message.t()} | {:error, t(), String.t()}
+ @spec run_while_needs_response(t()) :: {:ok, t()} | {:error, t(), String.t()}
defp run_while_needs_response(%LLMChain{needs_response: false} = chain) do
-   {:ok, chain, chain.last_message}
+   {:ok, chain}
end

defp run_while_needs_response(%LLMChain{needs_response: true} = chain) do
@@ -575,6 +582,7 @@ defmodule LangChain.Chains.LLMChain do
chain
| messages: chain.messages ++ [new_message],
last_message: new_message,
+   exchanged_messages: chain.exchanged_messages ++ [new_message],
needs_response: needs_response
}
end
@@ -835,4 +843,15 @@ defmodule LangChain.Chains.LLMChain do
Callbacks.fire(chain.callbacks, callback_name, [chain] ++ additional_arguments)
chain
end

+ defp clear_exchanged_messages(%LLMChain{} = chain) do
+   %LLMChain{chain | exchanged_messages: []}
+ end
+
+ defp raise_on_obsolete_run_opts(opts) do
+   if Keyword.has_key?(opts, :callback_fn) do
+     raise LangChainError,
+       "The LLMChain.run option `:callback_fn` was removed; see `add_callback/2` instead."
+   end
+ end
end
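With the new `exchanged_messages` field, a caller can inspect everything a single `run/2` call sent and received, not just `last_message`. A hedged sketch using the new return shape (model setup is illustrative):

```elixir
alias LangChain.Chains.LLMChain
alias LangChain.ChatModels.ChatOpenAI
alias LangChain.Message

{:ok, updated_chain} =
  %{llm: ChatOpenAI.new!(%{model: "gpt-4o"})}
  |> LLMChain.new!()
  |> LLMChain.add_message(Message.new_user!("What's the capital of France?"))
  |> LLMChain.run(mode: :while_needs_response)

# Cleared at the start of each run, so this lists only this run's traffic.
for msg <- updated_chain.exchanged_messages, do: msg.role

# The final assistant message is still tracked on the chain.
updated_chain.last_message.content
```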
2 changes: 1 addition & 1 deletion lib/chains/text_to_title_chain.ex
@@ -82,7 +82,7 @@ defmodule LangChain.Chains.TextToTitleChain do
|> TextToTitleChain.run()
"""
- @spec run(t(), Keyword.t()) :: String.t() | no_return()
+ @spec run(t(), Keyword.t()) :: {:ok, LLMChain.t()} | {:error, LLMChain.t(), String.t()}
def run(%TextToTitleChain{} = chain, opts \\ []) do
messages =
[
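`TextToTitleChain.run/2` now returns the updated chain rather than a bare title string, matching `LLMChain.run/2`. A hedged sketch of reading the title under the new contract (the input text is illustrative):

```elixir
alias LangChain.Chains.TextToTitleChain
alias LangChain.ChatModels.ChatOpenAI

{:ok, updated_chain} =
  %{
    llm: ChatOpenAI.new!(%{model: "gpt-4o"}),
    input_text: "Let's plan the team offsite: dates, venue options, and budget."
  }
  |> TextToTitleChain.new!()
  |> TextToTitleChain.run()

# The generated title is the content of the final assistant message.
updated_chain.last_message.content
```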
2 changes: 1 addition & 1 deletion lib/chat_models/llm_callbacks.ex
@@ -37,7 +37,7 @@ defmodule LangChain.ChatModels.LLMCallbacks do
A function declaration that matches the signature.
- def handle_llm_new_delta(_chat_model, _index, delta) do
+ def handle_llm_new_delta(_chat_model, delta) do
IO.write(delta)
end
"""
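The streaming delta callback drops its `index` argument in this commit. A hedged sketch of a matching handler; the `on_llm_new_delta` key and the delta's `content` field follow the library's callback conventions, so treat those exact names as assumptions:

```elixir
alias LangChain.ChatModels.ChatOpenAI

handler = %{
  # New two-argument signature: the chat model and the message delta.
  on_llm_new_delta: fn _chat_model, delta ->
    IO.write(delta.content || "")
  end
}

# Attach the handler to a streaming model, as in the getting_started notebook.
llm = ChatOpenAI.new!(%{model: "gpt-4o", stream: true, callbacks: [handler]})
```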
4 changes: 2 additions & 2 deletions lib/utils/chain_result.ex
@@ -20,7 +20,7 @@ defmodule LangChain.Utils.ChainResult do
"""
@spec to_string(
LLMChain.t()
-   | {:ok, LLMChain.t(), Message.t()}
+   | {:ok, LLMChain.t()}
| {:error, LLMChain.t(), String.t()}
) ::
{:ok, String.t()} | {:error, LLMChain.t(), String.t()}
@@ -29,7 +29,7 @@
{:error, chain, reason}
end

- def to_string({:ok, %LLMChain{} = chain, _message}) do
+ def to_string({:ok, %LLMChain{} = chain}) do
ChainResult.to_string(chain)
end

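Because `ChainResult.to_string/1` accepts the new two-element success tuple (and passes `{:error, ...}` through), it still composes directly with a piped `run/2`. A hedged sketch:

```elixir
alias LangChain.Chains.LLMChain
alias LangChain.ChatModels.ChatOpenAI
alias LangChain.Message
alias LangChain.Utils.ChainResult

{:ok, text} =
  %{llm: ChatOpenAI.new!(%{model: "gpt-4o"})}
  |> LLMChain.new!()
  |> LLMChain.add_message(Message.new_user!("Say hello in one word."))
  |> LLMChain.run()
  # Unwraps {:ok, chain} and returns {:ok, content_of_last_message}.
  |> ChainResult.to_string()

text
```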
8 changes: 4 additions & 4 deletions notebooks/context-specific-image-descriptions.livemd
@@ -177,14 +177,14 @@ alias LangChain.MessageProcessors.JsonProcessor
# When we `apply_prompt_templates` below, the data is rendered into the template.
image_data_from_other_system = "image of urban art mural on underpass at 507 King St E"

- {:ok, _updated_chain, response} =
+ {:ok, updated_chain} =
%{llm: openai_chat_model, verbose: true}
|> LLMChain.new!()
|> LLMChain.apply_prompt_templates(messages, %{extra_image_info: image_data_from_other_system})
|> LLMChain.message_processors([JsonProcessor.new!()])
|> LLMChain.run(mode: :until_success)

- response.processed_content
+ updated_chain.last_message.processed_content
```

Notice that when running the chain, we use the option `mode: :until_success`. Some LLMs are better at generating valid JSON than others. When we include the `JsonProcessor`, it parses the assistant's content, converting it into an Elixir map. The converted data is stored in `message.processed_content`.
@@ -238,14 +238,14 @@ alias LangChain.MessageProcessors.JsonProcessor
# When we `apply_prompt_templates` below, the data is rendered into the template.
image_data_from_other_system = "image of urban art mural on underpass at 507 King St E"

- {:ok, _updated_chain, response} =
+ {:ok, updated_chain} =
%{llm: anthropic_chat_model, verbose: true}
|> LLMChain.new!()
|> LLMChain.apply_prompt_templates(messages, %{extra_image_info: image_data_from_other_system})
|> LLMChain.message_processors([JsonProcessor.new!()])
|> LLMChain.run(mode: :until_success)

- response.processed_content
+ updated_chain.last_message.processed_content
```

Nice! The Elixir LangChain library abstracted away the differences between the two services. With no code changes, we can make a similar request about the image from Anthropic's Claude LLM as well!
7 changes: 4 additions & 3 deletions notebooks/custom_functions.livemd
@@ -236,7 +236,7 @@ Additionally, the `stream: false` option says we want the result only when it's
```elixir
alias LangChain.Chains.LLMChain

- {:ok, updated_chain, response} =
+ {:ok, updated_chain} =
%{llm: chat_model, custom_context: context, verbose: true}
|> LLMChain.new!()
|> LLMChain.add_messages(messages)
@@ -245,6 +245,7 @@ alias LangChain.Chains.LLMChain
# function calls and provide a response.
|> LLMChain.run(mode: :while_needs_response)

+ response = updated_chain.last_message
IO.write(response.content)
response.content
```
@@ -382,8 +383,8 @@ MESSAGE PROCESSED: %LangChain.Message{
tool_calls: [],
tool_results: nil
}
- Joan's favorite,
- Aardvark graces night with charm,
+ Joan's favorite,
+ Aardvark graces night with charm,
Silent earth's delight.
```

12 changes: 6 additions & 6 deletions notebooks/getting_started.livemd
@@ -28,13 +28,13 @@ alias LangChain.Chains.LLMChain
alias LangChain.ChatModels.ChatOpenAI
alias LangChain.Message

- {:ok, _updated_chain, response} =
+ {:ok, updated_chain} =
%{llm: ChatOpenAI.new!(%{model: "gpt-4o"})}
|> LLMChain.new!()
|> LLMChain.add_message(Message.new_user!("Testing, testing!"))
|> LLMChain.run()

- response.content
+ updated_chain.last_message.content
```

Nice! We've just seen how easy it is to get access to ChatGPT from our Elixir application!
@@ -48,7 +48,7 @@ When working with ChatGPT and other LLMs, the conversation works as a series of
Let's create a system message followed by a user message.

```elixir
- {:ok, _updated_chain, response} =
+ {:ok, updated_chain} =
%{llm: ChatOpenAI.new!(%{model: "gpt-4"})}
|> LLMChain.new!()
|> LLMChain.add_messages([
Expand All @@ -59,7 +59,7 @@ Let's create a system message followed by a user message.
])
|> LLMChain.run()

- response.content
+ updated_chain.last_message.content
```

Here's the answer it gave me when I ran it:
@@ -94,7 +94,7 @@ handler = %{
end
}

- {:ok, _updated_chain, response} =
+ {:ok, updated_chain} =
%{
# llm config for streaming and the deltas callback
llm: ChatOpenAI.new!(%{model: "gpt-4o", stream: true, callbacks: [handler]}),
@@ -108,7 +108,7 @@
])
|> LLMChain.run()

- response.content
+ updated_chain.last_message.content
# streamed
# ==> Washington D.C. stands,
# ... Monuments reflect history,
4 changes: 2 additions & 2 deletions test/chains/data_extraction_chain_test.exs
@@ -66,9 +66,9 @@ defmodule LangChain.Chains.DataExtractionChainTest do
# run the chain, chain.run(prompt to extract data from)
data_prompt =
"Alex is 5 feet tall. Claudia is 4 feet taller than Alex and jumps higher than him.
- Claudia is a brunette and Alex is blonde. Alex's dog Frosty is a labrador and likes to play hide and seek."
+ Claudia is a brunette and Alex is blonde. Alex's dog Frosty is a labrador and likes to play hide and seek. Identify each person and their relevant information."

- {:ok, result} = DataExtractionChain.run(chat, schema_parameters, data_prompt, verbose: false)
+ {:ok, result} = DataExtractionChain.run(chat, schema_parameters, data_prompt, verbose: true)

assert result == [
%{