got some tool calling running

This commit is contained in:
2026-03-05 22:06:58 -07:00
parent cacae19f74
commit b89d4e5a28
8 changed files with 495 additions and 151 deletions

View File

@@ -0,0 +1,45 @@
defmodule ElixirAi.ChatUtils do
  @moduledoc """
  Builds and fires streaming chat-completion requests against the configured
  AI endpoint, forwarding each streamed SSE line to the caller's process.
  """
  require Logger
  import ElixirAi.AiUtils.StreamLineUtils

  @doc """
  Starts an unlinked task that POSTs `messages` (plus the tool definitions in
  `tools`) to the configured endpoint and streams the response line-by-line
  to `server` via `handle_stream_line/2`.

  `tools` is an enumerable of `{name, definition}` pairs; only the definitions
  are sent to the API. Endpoint, token and model come from application config.
  """
  def request_ai_response(server, messages, tools) do
    Task.start(fn ->
      api_url = Application.fetch_env!(:elixir_ai, :ai_endpoint)
      api_key = Application.fetch_env!(:elixir_ai, :ai_token)
      model = Application.fetch_env!(:elixir_ai, :ai_model)

      tool_definition = Enum.map(tools, fn {_name, definition} -> definition end)

      body = %{
        model: model,
        stream: true,
        messages: Enum.map(messages, &api_message/1),
        tools: tool_definition
      }

      headers = [{"authorization", "Bearer #{api_key}"}]

      case Req.post(api_url,
             json: body,
             headers: headers,
             into: fn {:data, data}, acc ->
               # One network chunk may carry several "data: ..." SSE lines.
               data
               |> String.split("\n")
               |> Enum.each(&handle_stream_line(server, &1))

               {:cont, acc}
             end
           ) do
        {:ok, _} ->
          :ok

        {:error, reason} ->
          # BUG FIX: was IO.warn/1, which is intended for compile-time
          # deprecation warnings; this is a runtime failure, so report it
          # through the Logger the module already requires.
          Logger.error("AI request failed: #{inspect(reason)}")
      end
    end)
  end

  @doc """
  Converts an internal message map (atom role) into the string-keyed shape
  the chat-completions API expects.
  """
  def api_message(%{role: role, content: content}) do
    %{role: Atom.to_string(role), content: content}
  end
end

View File

@@ -0,0 +1,167 @@
defmodule ElixirAi.AiUtils.StreamLineUtils do
require Logger
# Decodes server-sent-event (SSE) lines from the streaming chat-completions
# endpoint and forwards each recognised chunk to the `server` process as a
# plain message. Clauses are tried top to bottom; map patterns match subsets
# of keys, so the more specific shapes (tool-call start before middle) must
# stay in this order.

# Empty SSE separator / keep-alive line: nothing to forward.
def handle_stream_line(_server, "") do
:ok
end
# Terminal SSE sentinel. Intentionally a no-op: the "stop" finish_reason
# chunk below already sends :ai_stream_finish to the server.
def handle_stream_line(_server, "data: [DONE]") do
# send(server, :ai_stream_done)
:ok
end
# Payload-bearing SSE line: strip the "data: " prefix, decode the JSON and
# re-dispatch on the resulting map.
def handle_stream_line(server, "data: " <> json) do
case Jason.decode(json) do
{:ok, body} ->
# Logger.debug("Received AI chunk: #{inspect(body)}")
handle_stream_line(server, body)
other ->
Logger.error("Failed to decode AI response chunk: #{inspect(other)}")
:ok
end
end
# first streamed response: assistant role announced with no content yet,
# so the server can open a fresh streaming-response accumulator for `id`.
def handle_stream_line(server, %{
"choices" => [%{"delta" => %{"content" => nil, "role" => "assistant"}}],
"id" => id
}) do
send(
server,
{:start_new_ai_response, id}
)
end
# last streamed response: normal (non-tool) completion of the stream.
def handle_stream_line(server, %{
"choices" => [%{"finish_reason" => "stop"}],
"id" => id
}) do
send(
server,
{:ai_stream_finish, id}
)
end
# streamed in reasoning: a delta carrying a reasoning_content fragment.
def handle_stream_line(server, %{
"choices" => [
%{
"delta" => %{"reasoning_content" => reasoning_content},
"finish_reason" => nil
}
],
"id" => id
}) do
send(
server,
{:ai_reasoning_chunk, id, reasoning_content}
)
end
# streamed in text: a delta carrying a visible-content fragment.
# NOTE(review): the variable is named `reasoning_content` but holds the
# plain text delta here — consider renaming for clarity.
def handle_stream_line(server, %{
"choices" => [
%{
"delta" => %{"content" => reasoning_content},
"finish_reason" => nil
}
],
"id" => id
}) do
send(
server,
{:ai_text_chunk, id, reasoning_content}
)
end
# start tool call: first tool_calls delta, carrying the function name and
# the opening fragment of its JSON arguments.
# NOTE(review): "index" is matched at the *choice* level and used as the
# tool-call index; OpenAI-style payloads put the per-call index inside the
# tool_calls entry instead — confirm against the actual provider's format.
def handle_stream_line(server, %{
"choices" => [
%{
"delta" => %{
"tool_calls" => [
%{
"function" => %{
"name" => tool_name,
"arguments" => tool_args_start
}
}
]
},
"finish_reason" => nil,
"index" => tool_index
}
],
"id" => id
}) do
send(
server,
{:ai_tool_call_start, id, {tool_name, tool_args_start, tool_index}}
)
end
# middle tool call: subsequent argument fragments (no "name" key required;
# the start clause above must stay first or it would be shadowed by this one).
def handle_stream_line(server, %{
"choices" => [
%{
"delta" => %{
"tool_calls" => [
%{
"function" => %{
"arguments" => tool_args_diff
}
}
]
},
"finish_reason" => nil,
"index" => tool_index
}
],
"id" => id
}) do
send(
server,
{:ai_tool_call_middle, id, {tool_args_diff, tool_index}}
)
end
# end tool call: the stream finished because the model is invoking tools
# ("finish_reason" => "tool_calls"); the delta map may be empty.
def handle_stream_line(server, %{
"choices" => [
%{
"delta" => %{},
"finish_reason" => "tool_calls",
"index" => tool_index
}
],
"id" => id
}) do
send(
server,
{:ai_tool_call_end, id, tool_index}
)
end
# Error payload from the provider: log it, never forward.
def handle_stream_line(_server, %{"error" => error_info}) do
Logger.error("Received error from AI stream: #{inspect(error_info)}")
:ok
end
# Raw JSON line without the "data: " prefix: try decoding once, then give up.
def handle_stream_line(server, json) when is_binary(json) do
case Jason.decode(json) do
{:ok, body} ->
handle_stream_line(server, body)
_ ->
Logger.warning("Received unmatched stream line: #{inspect(json)}")
:ok
end
end
# Catch-all: decoded maps in shapes none of the clauses above recognise.
def handle_stream_line(_server, unmatched_message) do
Logger.warning("Received unmatched stream line: #{inspect(unmatched_message)}")
:ok
end
end

View File

@@ -29,16 +29,35 @@ defmodule ElixirAi.ChatRunner do
{:ok, state}
end
def tools do
  # Tool registry: name => chat-API definition plus the local function that
  # is invoked with the decoded argument map when the model calls the tool.
  %{
    "store_thing" => %{
      definition: ElixirAi.ToolTesting.store_thing_definition("store_thing"),
      function: &ElixirAi.ToolTesting.hold_thing/1
    },
    "read_thing" => %{
      definition: ElixirAi.ToolTesting.read_thing_definition("read_thing"),
      # BUG FIX: the :ai_tool_call_end handler always invokes the tool as
      # `tool_function.(decoded_args)` (arity 1). Capturing &get_thing/0
      # (arity 0) raised BadArityError at call time; wrap it so it accepts
      # and ignores the argument map.
      function: fn _args -> ElixirAi.ToolTesting.get_thing() end
    }
  }
end
def handle_cast({:user_message, text_content}, state) do
  # Record the user's message, broadcast it, then kick off a streaming AI
  # request carrying every registered tool definition.
  user_message = %{role: :user, content: text_content}
  broadcast({:user_chat_message, user_message})

  updated_state = %{state | messages: state.messages ++ [user_message], turn: :assistant}

  tool_definitions =
    Map.new(tools(), fn {name, %{definition: definition}} -> {name, definition} end)

  request_ai_response(self(), updated_state.messages, tool_definitions)
  {:noreply, updated_state}
end
def handle_info({:start_new_ai_response, id}, state) do
starting_response = %{id: id, reasoning_content: "", content: ""}
starting_response = %{id: id, reasoning_content: "", content: "", tool_calls: []}
broadcast({:start_ai_response_stream, starting_response})
{:noreply, %{state | streaming_response: starting_response}}
@@ -88,7 +107,8 @@ defmodule ElixirAi.ChatRunner do
final_message = %{
role: :assistant,
content: state.streaming_response.content,
reasoning_content: state.streaming_response.reasoning_content
reasoning_content: state.streaming_response.reasoning_content,
tool_calls: state.streaming_response.tool_calls
}
{:noreply,
@@ -100,6 +120,111 @@ defmodule ElixirAi.ChatRunner do
}}
end
def handle_info({:ai_tool_call_start, _id, {tool_name, tool_args_start, tool_index}}, state) do
  # Open an accumulator entry for this tool call; later argument fragments
  # are merged in by matching on `index`.
  Logger.info("AI started tool call #{tool_name}")

  started_call = %{name: tool_name, arguments: tool_args_start, index: tool_index}
  streaming = state.streaming_response
  updated_streaming = %{streaming | tool_calls: streaming.tool_calls ++ [started_call]}

  {:noreply, %{state | streaming_response: updated_streaming}}
end
def handle_info({:ai_tool_call_middle, _id, {tool_args_diff, tool_index}}, state) do
  # Concatenate the streamed argument fragment onto the in-flight call with
  # the matching index; every other call passes through untouched.
  streaming = state.streaming_response

  merged_calls =
    Enum.map(streaming.tool_calls, fn
      %{arguments: args_so_far, index: ^tool_index} = call ->
        %{call | arguments: args_so_far <> tool_args_diff}

      untouched ->
        untouched
    end)

  {:noreply, %{state | streaming_response: %{streaming | tool_calls: merged_calls}}}
end
def handle_info({:ai_tool_call_end, _id, tool_index}, state) do
  # The model finished streaming arguments for the call at `tool_index`:
  # decode the accumulated JSON and execute the registered tool function.
  tool_calls =
    Enum.map(state.streaming_response.tool_calls, fn
      %{arguments: existing_args, index: ^tool_index} = tool_call ->
        case Jason.decode(existing_args) do
          {:ok, decoded_args} ->
            # NOTE(review): assumes every registered tool function takes
            # exactly one argument (the decoded args map) — confirm the
            # registry in tools/0 honours this.
            tool_function = tools()[tool_call.name].function
            Map.put(tool_call, :result, tool_function.(decoded_args))

          {:error, e} ->
            Map.put(tool_call, :error, "Failed to decode tool arguments: #{inspect(e)}")
        end

      other ->
        other
    end)

  # A call is finished once it carries either a :result or an :error.
  all_tool_calls_finished =
    Enum.all?(tool_calls, fn call ->
      Map.has_key?(call, :result) or Map.has_key?(call, :error)
    end)

  state =
    if all_tool_calls_finished do
      Logger.info("All tool calls finished, broadcasting updated tool calls with results")

      new_message = %{
        role: :assistant,
        content: state.streaming_response.content,
        reasoning_content: state.streaming_response.reasoning_content,
        tool_calls: tool_calls
      }

      broadcast({:tool_calls_finished, new_message})

      # BUG FIX: the updated state must be this branch's return value.
      # Previously broadcast/1 was the last expression, so its return value
      # — not the new state — was installed as the GenServer state, losing
      # the message history and streaming bookkeeping.
      %{state | messages: state.messages ++ [new_message], streaming_response: nil}
    else
      %{state | streaming_response: %{state.streaming_response | tool_calls: tool_calls}}
    end

  {:noreply, state}
end
def handle_call(:get_conversation, _from, state) do
  # Expose the full server state as the conversation snapshot; state is
  # left unchanged.
  conversation_snapshot = state
  {:reply, conversation_snapshot, state}
end

View File

@@ -1,120 +0,0 @@
# NOTE(review): this is the pre-refactor file DELETED by this commit; it was
# split into the new ElixirAi.ChatUtils (request side, with tool support) and
# ElixirAi.AiUtils.StreamLineUtils (stream-line dispatch). Kept here only as
# diff context.
defmodule ElixirAi.ChatUtils do
require Logger
# Fires a streaming chat-completion request and dispatches each SSE line.
def request_ai_response(server, messages) do
Task.start(fn ->
api_url = Application.fetch_env!(:elixir_ai, :ai_endpoint)
api_key = Application.fetch_env!(:elixir_ai, :ai_token)
model = Application.fetch_env!(:elixir_ai, :ai_model)
body = %{
model: model,
stream: true,
messages: messages |> Enum.map(&api_message/1)
}
headers = [{"authorization", "Bearer #{api_key}"}]
case Req.post(api_url,
json: body,
headers: headers,
into: fn {:data, data}, acc ->
# One network chunk may carry several "data: ..." SSE lines.
data
|> String.split("\n")
|> Enum.each(&handle_stream_line(server, &1))
{:cont, acc}
end
) do
{:ok, _} ->
:ok
{:error, reason} ->
IO.warn("AI request failed: #{inspect(reason)}")
end
end)
end
# Empty SSE separator / keep-alive line: nothing to forward.
def handle_stream_line(_server, "") do
:ok
end
# Terminal SSE sentinel; intentionally a no-op (finish is signalled by the
# "stop" finish_reason chunk below).
def handle_stream_line(server, "data: [DONE]") do
# send(server, :ai_stream_done)
:ok
end
# Payload-bearing SSE line: decode the JSON and re-dispatch on the map.
def handle_stream_line(server, "data: " <> json) do
case Jason.decode(json) do
{:ok, body} ->
# Logger.debug("Received AI chunk: #{inspect(body)}")
handle_stream_line(server, body)
other ->
Logger.error("Failed to decode AI response chunk: #{inspect(other)}")
:ok
end
end
# first streamed response: assistant role announced with no content yet.
def handle_stream_line(server, %{
"choices" => [%{"delta" => %{"content" => nil, "role" => "assistant"}}],
"id" => id
}) do
send(
server,
{:start_new_ai_response, id}
)
end
# last streamed response: completion of the stream.
def handle_stream_line(server, %{
"choices" => [%{"finish_reason" => "stop"}],
"id" => id
}) do
send(
server,
{:ai_stream_finish, id}
)
end
# streamed in reasoning: a delta carrying a reasoning_content fragment.
def handle_stream_line(server, %{
"choices" => [
%{
"delta" => %{"reasoning_content" => reasoning_content},
"finish_reason" => nil
}
],
"id" => id
}) do
send(
server,
{:ai_reasoning_chunk, id, reasoning_content}
)
end
# Streamed visible text delta (variable name is misleading here).
def handle_stream_line(server, %{
"choices" => [
%{
"delta" => %{"content" => reasoning_content},
"finish_reason" => nil
}
],
"id" => id
}) do
send(
server,
{:ai_text_chunk, id, reasoning_content}
)
end
# Catch-all for unrecognised lines/chunks.
def handle_stream_line(_server, unmatched_message) do
Logger.warning("Received unmatched stream line: #{inspect(unmatched_message)}")
:ok
end
# Converts an internal message (atom role) to the API's string-keyed shape.
def api_message(%{role: role, content: content}) do
%{role: Atom.to_string(role), content: content}
end
end

View File

@@ -0,0 +1,55 @@
defmodule ElixirAi.ToolTesting do
  @moduledoc """
  Minimal GenServer-backed scratch store used to exercise AI tool calling:
  `store_thing` holds a single value, `read_thing` returns it.
  """
  use GenServer

  # -- Client API --------------------------------------------------------

  def start_link(_) do
    GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
  end

  @doc "Stores `thing` as the server's entire state (fire-and-forget)."
  def hold_thing(thing) do
    GenServer.cast(__MODULE__, {:hold_thing, thing})
  end

  @doc "Returns whatever was most recently stored with `hold_thing/1`."
  def get_thing do
    GenServer.call(__MODULE__, :get_thing)
  end

  @doc "Chat-completions tool definition for storing a key/value pair under `name`."
  def store_thing_definition(name) do
    %{
      "type" => "function",
      "function" => %{
        "name" => name,
        "description" => "store key value pair",
        "parameters" => %{
          "type" => "object",
          "properties" => %{
            "name" => %{"type" => "string"},
            "value" => %{"type" => "string"}
          },
          "required" => ["name", "value"]
        }
      }
    }
  end

  @doc "Chat-completions tool definition (no parameters) for reading the stored pair."
  def read_thing_definition(name) do
    %{
      "type" => "function",
      "function" => %{
        "name" => name,
        "description" => "read key value pair that was previously stored with store_thing"
      }
    }
  end

  # -- Server callbacks --------------------------------------------------

  @impl true
  def init(args) do
    {:ok, args}
  end

  @impl true
  def handle_cast({:hold_thing, thing}, _state) do
    # Deliberately replaces the whole state: only the latest thing is kept.
    {:noreply, thing}
  end

  @impl true
  def handle_call(:get_thing, _from, state) do
    {:reply, state, state}
  end
end