diff --git a/lib/elixir_ai/conversation_manager.ex b/lib/elixir_ai/conversation_manager.ex
index 2126283..471149f 100644
--- a/lib/elixir_ai/conversation_manager.ex
+++ b/lib/elixir_ai/conversation_manager.ex
@@ -20,7 +20,7 @@ defmodule ElixirAi.ConversationManager do
   def init(_) do
     Logger.info("ConversationManager initializing...")
     send(self(), :load_conversations)
-    {:ok, :loading_conversations}
+    {:ok, %{conversations: :loading, subscriptions: MapSet.new()}}
   end
 
   def create_conversation(name, ai_provider_id) do
@@ -39,58 +39,77 @@ defmodule ElixirAi.ConversationManager do
     GenServer.call(@name, {:get_messages, name})
   end
 
-  def handle_call(message, from, :loading_conversations) do
+  def handle_call(message, from, %{conversations: :loading} = state) do
     Logger.warning(
       "Received call #{inspect(message)} from #{inspect(from)} while loading conversations. Retrying after delay."
     )
 
     Process.send_after(self(), {:retry_call, message, from}, 100)
-    {:noreply, :loading_conversations}
+    {:noreply, state}
   end
 
-  def handle_call({:create, name, ai_provider_id}, _from, conversations) do
+  def handle_call(
+        {:create, name, ai_provider_id},
+        _from,
+        %{conversations: conversations, subscriptions: subscriptions} = state
+      ) do
     if Map.has_key?(conversations, name) do
-      {:reply, {:error, :already_exists}, conversations}
+      {:reply, {:error, :already_exists}, state}
     else
       case Conversation.create(name, ai_provider_id) do
         :ok ->
-          case start_and_subscribe(name) do
-            {:ok, _pid} = ok -> {:reply, ok, Map.put(conversations, name, [])}
-            error -> {:reply, error, conversations}
+          case start_and_subscribe(name, subscriptions) do
+            {:ok, pid, new_subscriptions} ->
+              {:reply, {:ok, pid},
+               %{
+                 state
+                 | conversations: Map.put(conversations, name, []),
+                   subscriptions: new_subscriptions
+               }}
+
+            {:error, _reason} = error ->
+              {:reply, error, state}
           end
 
         {:error, _} = error ->
-          {:reply, error, conversations}
+          {:reply, error, state}
       end
     end
   end
 
-  def handle_call({:open, name}, _from, conversations) do
+  def handle_call(
+        {:open, name},
+        _from,
+        %{conversations: conversations, subscriptions: subscriptions} = state
+      ) do
     if Map.has_key?(conversations, name) do
-      case start_and_subscribe(name) do
-        {:ok, _pid} = ok -> {:reply, ok, conversations}
-        error -> {:reply, error, conversations}
+      case start_and_subscribe(name, subscriptions) do
+        {:ok, pid, new_subscriptions} ->
+          {:reply, {:ok, pid}, %{state | subscriptions: new_subscriptions}}
+
+        {:error, _reason} = error ->
+          {:reply, error, state}
       end
     else
-      {:reply, {:error, :not_found}, conversations}
+      {:reply, {:error, :not_found}, state}
     end
   end
 
-  def handle_call(:list, _from, conversations) do
+  def handle_call(:list, _from, %{conversations: conversations} = state) do
     keys = Map.keys(conversations)
 
     Logger.debug(
       "list_conversations returning: #{inspect(keys, limit: :infinity, printable_limit: :infinity, binaries: :as_binaries)}"
     )
 
-    {:reply, keys, conversations}
+    {:reply, keys, state}
   end
 
-  def handle_call({:get_messages, name}, _from, conversations) do
-    {:reply, Map.get(conversations, name, []), conversations}
+  def handle_call({:get_messages, name}, _from, %{conversations: conversations} = state) do
+    {:reply, Map.get(conversations, name, []), state}
   end
 
-  def handle_info({:store_message, name, message}, conversations) do
+  def handle_info({:store_message, name, message}, %{conversations: conversations} = state) do
     case Conversation.find_id(name) do
       {:ok, conv_id} ->
         Message.insert(conv_id, message, topic: ElixirAi.ChatRunner.message_topic(name))
@@ -99,17 +118,17 @@ defmodule ElixirAi.ConversationManager do
         :ok
     end
 
-    {:noreply, Map.update(conversations, name, [message], &(&1 ++ [message]))}
+    {:noreply,
+     %{state | conversations: Map.update(conversations, name, [message], &(&1 ++ [message]))}}
   end
 
-  def handle_info(:load_conversations, _conversations) do
+  def handle_info(:load_conversations, state) do
     conversation_list = Conversation.all_names()
     Logger.info("Loaded #{length(conversation_list)} conversations from DB")
     conversations = Map.new(conversation_list, fn %{name: name} -> {name, []} end)
     Logger.info("Conversation map keys: #{inspect(Map.keys(conversations))}")
-    # {:ok, conversations}
-    {:noreply, conversations}
+    {:noreply, %{state | conversations: conversations}}
   end
 
   def handle_info({:retry_call, message, from}, state) do
@@ -123,7 +142,7 @@ defmodule ElixirAi.ConversationManager do
     end
   end
 
-  defp start_and_subscribe(name) do
+  defp start_and_subscribe(name, subscriptions) do
     result =
       case Horde.DynamicSupervisor.start_child(
              ElixirAi.ChatRunnerSupervisor,
@@ -135,12 +154,19 @@ defmodule ElixirAi.ConversationManager do
       end
 
     case result do
-      {:ok, _pid} ->
-        Phoenix.PubSub.subscribe(ElixirAi.PubSub, ElixirAi.ChatRunner.message_topic(name))
-        result
+      {:ok, pid} ->
+        new_subscriptions =
+          if MapSet.member?(subscriptions, name) do
+            subscriptions
+          else
+            Phoenix.PubSub.subscribe(ElixirAi.PubSub, ElixirAi.ChatRunner.message_topic(name))
+            MapSet.put(subscriptions, name)
+          end
 
-      _ ->
-        result
+        {:ok, pid, new_subscriptions}
+
+      error ->
+        error
     end
   end
 end
diff --git a/lib/elixir_ai/data/ai_provider.ex b/lib/elixir_ai/data/ai_provider.ex
index f7356a4..6526a86 100644
--- a/lib/elixir_ai/data/ai_provider.ex
+++ b/lib/elixir_ai/data/ai_provider.ex
@@ -28,44 +28,44 @@ defmodule ElixirAi.AiProvider do
     sql = "SELECT id, name, model_name FROM ai_providers"
     params = %{}
 
-    case DbHelpers.run_sql(sql, params, "ai_providers") do
-      {:error, :db_error} ->
+    case DbHelpers.run_sql(sql, params, "ai_providers", AiProviderSchema.partial_schema()) do
+      {:error, _} ->
         []
 
-      result ->
-        results =
-          Enum.map(result.rows, fn [id, name, model_name] ->
-            attrs = %{id: id, name: name, model_name: model_name} |> convert_id_to_string()
-
-            case Zoi.parse(AiProviderSchema.partial_schema(), attrs) do
-              {:ok, valid} ->
-                struct(AiProviderSchema, valid)
-
-              {:error, errors} ->
-                Logger.error("Invalid provider data from DB: #{inspect(errors)}")
-                raise ArgumentError, "Invalid provider data: #{inspect(errors)}"
-            end
-          end)
-
-        Logger.debug("AiProvider.all() returning: #{inspect(results)}")
-
-        results
+      rows ->
+        rows
+        |> Enum.map(fn row ->
+          row |> convert_uuid_to_string() |> then(&struct(AiProviderSchema, &1))
+        end)
+        |> tap(&Logger.debug("AiProvider.all() returning: #{inspect(&1)}"))
     end
   end
 
-  # Convert binary UUID to string for frontend
-  defp convert_id_to_string(%{id: id} = provider) when is_binary(id) do
+  defp convert_uuid_to_string(%{id: id} = provider) when is_binary(id) do
     %{provider | id: Ecto.UUID.cast!(id)}
   end
 
-  defp convert_id_to_string(provider), do: provider
+  defp convert_uuid_to_string(provider), do: provider
 
   def create(attrs) do
     now = DateTime.truncate(DateTime.utc_now(), :second)
 
     sql = """
-    INSERT INTO ai_providers (name, model_name, api_token, completions_url, inserted_at, updated_at)
-    VALUES ($(name), $(model_name), $(api_token), $(completions_url), $(inserted_at), $(updated_at))
+    INSERT INTO ai_providers (
+      name,
+      model_name,
+      api_token,
+      completions_url,
+      inserted_at,
+      updated_at
+    ) VALUES (
+      $(name),
+      $(model_name),
+      $(api_token),
+      $(completions_url),
+      $(inserted_at),
+      $(updated_at)
+    )
     """
 
     params = %{
@@ -102,32 +102,10 @@ defmodule ElixirAi.AiProvider do
 
     params = %{"name" => name}
 
-    case DbHelpers.run_sql(sql, params, "ai_providers") do
-      {:error, :db_error} ->
-        {:error, :db_error}
-
-      %{rows: []} ->
-        {:error, :not_found}
-
-      %{rows: [[id, name, model_name, api_token, completions_url] | _]} ->
-        attrs =
-          %{
-            id: id,
-            name: name,
-            model_name: model_name,
-            api_token: api_token,
-            completions_url: completions_url
-          }
-          |> convert_id_to_string()
-
-        case Zoi.parse(AiProviderSchema.schema(), attrs) do
-          {:ok, valid} ->
-            {:ok, struct(AiProviderSchema, valid)}
-
-          {:error, errors} ->
-            Logger.error("Invalid provider data from DB: #{inspect(errors)}")
-            {:error, :invalid_data}
-        end
+    case DbHelpers.run_sql(sql, params, "ai_providers", AiProviderSchema.schema()) do
+      {:error, _} -> {:error, :db_error}
+      [] -> {:error, :not_found}
+      [row | _] -> {:ok, row |> convert_uuid_to_string() |> then(&struct(AiProviderSchema, &1))}
     end
   end
 
@@ -139,9 +117,9 @@ defmodule ElixirAi.AiProvider do
       {:error, :db_error} ->
         {:error, :db_error}
 
-      result ->
-        case result.rows do
-          [[0]] ->
+      rows ->
+        case rows do
+          [%{"count" => 0}] ->
             attrs = %{
               name: "default",
               model_name: Application.fetch_env!(:elixir_ai, :ai_model),
diff --git a/lib/elixir_ai/data/conversation.ex b/lib/elixir_ai/data/conversation.ex
index 0ab155f..da52d89 100644
--- a/lib/elixir_ai/data/conversation.ex
+++ b/lib/elixir_ai/data/conversation.ex
@@ -34,40 +34,26 @@ defmodule ElixirAi.Conversation do
   def all_names do
     sql = """
-    SELECT c.name, p.name, p.model_name, p.api_token, p.completions_url
+    SELECT c.name,
+      json_build_object(
+        'name', p.name,
+        'model_name', p.model_name,
+        'api_token', p.api_token,
+        'completions_url', p.completions_url
+      ) as provider
     FROM conversations c
     LEFT JOIN ai_providers p ON c.ai_provider_id = p.id
     """
 
     params = %{}
 
-    case DbHelpers.run_sql(sql, params, "conversations") do
-      {:error, :db_error} ->
+    case DbHelpers.run_sql(sql, params, "conversations", ConversationInfo.schema()) do
+      {:error, _} ->
         []
 
-      result ->
-        Enum.map(result.rows, fn [name, provider_name, model_name, api_token, completions_url] ->
-          attrs = %{
-            name: name,
-            provider: %{
-              name: provider_name,
-              model_name: model_name,
-              api_token: api_token,
-              completions_url: completions_url
-            }
-          }
-
-          case Zoi.parse(ConversationInfo.schema(), attrs) do
-            {:ok, valid} ->
-              struct(
-                ConversationInfo,
-                Map.put(valid, :provider, struct(Provider, valid.provider))
-              )
-
-            {:error, errors} ->
-              Logger.error("Invalid conversation data: #{inspect(errors)}")
-              raise ArgumentError, "Invalid conversation data: #{inspect(errors)}"
-          end
+      rows ->
+        Enum.map(rows, fn row ->
+          struct(ConversationInfo, Map.put(row, :provider, struct(Provider, row.provider)))
         end)
     end
   end
@@ -76,8 +62,17 @@ defmodule ElixirAi.Conversation do
     case Ecto.UUID.dump(ai_provider_id) do
       {:ok, binary_id} ->
         sql = """
-        INSERT INTO conversations (name, ai_provider_id, inserted_at, updated_at)
-        VALUES ($(name), $(ai_provider_id), $(inserted_at), $(updated_at))
+        INSERT INTO conversations (
+          name,
+          ai_provider_id,
+          inserted_at,
+          updated_at)
+        VALUES (
+          $(name),
+          $(ai_provider_id),
+          $(inserted_at),
+          $(updated_at)
+        )
         """
 
         timestamp = now()
@@ -110,11 +105,11 @@ defmodule ElixirAi.Conversation do
       {:error, :db_error} ->
         {:error, :db_error}
 
-      %{rows: []} ->
+      [] ->
         {:error, :not_found}
 
-      %{rows: [[id] | _]} ->
-        {:ok, id}
+      [row | _] ->
+        {:ok, row["id"]}
     end
   end
 
diff --git a/lib/elixir_ai/data/db_helpers.ex b/lib/elixir_ai/data/db_helpers.ex
index 2f82b80..3f0958b 100644
--- a/lib/elixir_ai/data/db_helpers.ex
+++ b/lib/elixir_ai/data/db_helpers.ex
@@ -2,11 +2,21 @@ defmodule ElixirAi.Data.DbHelpers do
   require Logger
 
   @get_named_param ~r/\$\((\w+)\)/
 
+  def run_sql(sql, params, topic, schema) do
+    run_sql(sql, params, topic) |> validate_rows(schema, topic)
+  end
+
   def run_sql(sql, params, topic) do
     {sql, params} = named_params_to_positional_params(sql, params)
 
     try do
-      Ecto.Adapters.SQL.query!(ElixirAi.Repo, sql, params)
+      result = Ecto.Adapters.SQL.query!(ElixirAi.Repo, sql, params)
+
+      # Transform rows to maps with column names as keys
+      Enum.map(result.rows, fn row ->
+        Enum.zip(result.columns, row)
+        |> Enum.into(%{})
+      end)
     rescue
       exception ->
         Logger.error("Database error: #{Exception.message(exception)}")
@@ -21,6 +31,31 @@ defmodule ElixirAi.Data.DbHelpers do
     end
   end
 
+  defp validate_rows({:error, :db_error}, _schema, _topic), do: {:error, :db_error}
+
+  defp validate_rows(rows, schema, topic) do
+    rows
+    |> Enum.reduce_while({:ok, []}, fn row, {:ok, acc} ->
+      case Zoi.parse(schema, row, coerce: true) do
+        {:ok, valid} ->
+          {:cont, {:ok, [valid | acc]}}
+
+        {:error, errors} ->
+          Logger.error("Schema validation error: #{inspect(errors)}")
+          {:halt, {:error, :validation_error}}
+      end
+    end)
+    |> then(fn
+      {:ok, valid_rows} ->
+        Enum.reverse(valid_rows)
+
+      error ->
+        Logger.error("Validation error: #{inspect(error)}")
+        Phoenix.PubSub.broadcast(ElixirAi.PubSub, topic, {:sql_result_validation_error, error})
+        error
+    end)
+  end
+
   def named_params_to_positional_params(query, params) do
     param_occurrences = Regex.scan(@get_named_param, query)
 
diff --git a/lib/elixir_ai/data/message.ex b/lib/elixir_ai/data/message.ex
index 9966c8f..56860d3 100644
--- a/lib/elixir_ai/data/message.ex
+++ b/lib/elixir_ai/data/message.ex
@@ -31,17 +31,9 @@ defmodule ElixirAi.Message do
       {:error, :db_error} ->
         []
 
-      result ->
-        Enum.map(result.rows, fn row ->
-          raw = %{
-            role: Enum.at(row, 0),
-            content: Enum.at(row, 1),
-            reasoning_content: Enum.at(row, 2),
-            tool_calls: Enum.at(row, 3),
-            tool_call_id: Enum.at(row, 4)
-          }
-
-          decoded = decode_message(raw)
+      rows ->
+        Enum.map(rows, fn row ->
+          decoded = decode_message(row)
 
           case Zoi.parse(MessageSchema.schema(), decoded) do
             {:ok, _valid} ->
@@ -69,9 +61,22 @@ defmodule ElixirAi.Message do
       when is_binary(conversation_id) and byte_size(conversation_id) == 16 do
     sql = """
     INSERT INTO messages (
-      conversation_id, role, content, reasoning_content,
-      tool_calls, tool_call_id, inserted_at
-    ) VALUES ($(conversation_id), $(role), $(content), $(reasoning_content), $(tool_calls), $(tool_call_id), $(inserted_at))
+      conversation_id,
+      role,
+      content,
+      reasoning_content,
+      tool_calls,
+      tool_call_id,
+      inserted_at
+    ) VALUES (
+      $(conversation_id),
+      $(role),
+      $(content),
+      $(reasoning_content),
+      $(tool_calls),
+      $(tool_call_id),
+      $(inserted_at)
+    )
     """
 
     params = %{
@@ -114,6 +119,7 @@ defmodule ElixirAi.Message do
 
   defp decode_message(row) do
     row
+    |> Map.new(fn {k, v} -> {String.to_existing_atom(k), v} end)
     |> Map.update!(:role, &String.to_existing_atom/1)
     |> Map.update(:tool_calls, nil, fn
       nil ->
diff --git a/test/stream_line_utils_test.exs b/test/stream_line_utils_test.exs
new file mode 100644
index 0000000..2d8de9e
--- /dev/null
+++ b/test/stream_line_utils_test.exs
@@ -0,0 +1,217 @@
+defmodule ElixirAi.AiUtils.StreamLineUtilsTest do
+  use ExUnit.Case
+  import ElixirAi.StreamChunkHelpers
+  alias ElixirAi.AiUtils.StreamLineUtils
+
+  setup do
+    test_pid = self()
+    {:ok, server: test_pid}
+  end
+
+  describe "Basic handling" do
+    test "handles empty string", %{server: server} do
+      assert :ok = StreamLineUtils.handle_stream_line(server, "")
+      refute_received _
+    end
+
+    test "handles [DONE] marker", %{server: server} do
+      assert :ok = StreamLineUtils.handle_stream_line(server, "data: [DONE]")
+      refute_received _
+    end
+
+    test "handles first streamed response with assistant role", %{server: server} do
+      line = start_response("chatcmpl-test")
+
+      StreamLineUtils.handle_stream_line(server, line)
+      assert_received {:start_new_ai_response, "chatcmpl-test"}
+    end
+
+    test "handles error response", %{server: server} do
+      line = error_chunk("API rate limit exceeded", "rate_limit_error")
+
+      assert :ok = StreamLineUtils.handle_stream_line(server, line)
+      refute_received _
+    end
+
+    test "handles error response from JSON", %{server: server} do
+      json_string = ~s({"error":{"message":"Invalid request","type":"invalid_request_error"}})
+
+      assert :ok = StreamLineUtils.handle_stream_line(server, json_string)
+      refute_received _
+    end
+
+    test "handles unmatched message structure", %{server: server} do
+      line = ~s(data: {"choices":[],"id":"test"})
+
+      assert :ok = StreamLineUtils.handle_stream_line(server, line)
+      refute_received _
+    end
+
+    test "handles invalid JSON gracefully", %{server: server} do
+      line = "data: {invalid json}"
+
+      assert :ok = StreamLineUtils.handle_stream_line(server, line)
+      refute_received _
+    end
+  end
+
+  describe "Reasoning content" do
+    test "handles single reasoning content chunk", %{server: server} do
+      line = reasoning_chunk("The")
+
+      StreamLineUtils.handle_stream_line(server, line)
+      assert_received {:ai_reasoning_chunk, "chatcmpl-test", "The"}
+    end
+
+    test "handles multiple reasoning content chunks", %{server: server} do
+      lines = [
+        reasoning_chunk("The"),
+        reasoning_chunk(" user"),
+        reasoning_chunk(" asks")
+      ]
+
+      for line <- lines do
+        StreamLineUtils.handle_stream_line(server, line)
+      end
+
+      assert_received {:ai_reasoning_chunk, "chatcmpl-test", "The"}
+      assert_received {:ai_reasoning_chunk, "chatcmpl-test", " user"}
+      assert_received {:ai_reasoning_chunk, "chatcmpl-test", " asks"}
+    end
+  end
+
+  describe "Text response content" do
+    test "handles single text content chunk", %{server: server} do
+      line = text_chunk("Hello")
+
+      StreamLineUtils.handle_stream_line(server, line)
+      assert_received {:ai_text_chunk, "chatcmpl-test", "Hello"}
+    end
+
+    test "handles multiple text content chunks", %{server: server} do
+      lines = [
+        text_chunk("I"),
+        text_chunk("'m"),
+        text_chunk(" happy")
+      ]
+
+      for line <- lines do
+        StreamLineUtils.handle_stream_line(server, line)
+      end
+
+      assert_received {:ai_text_chunk, "chatcmpl-test", "I"}
+      assert_received {:ai_text_chunk, "chatcmpl-test", "'m"}
+      assert_received {:ai_text_chunk, "chatcmpl-test", " happy"}
+    end
+
+    test "handles finish_reason stop", %{server: server} do
+      line = stop_chunk()
+
+      StreamLineUtils.handle_stream_line(server, line)
+      assert_received {:ai_text_stream_finish, "chatcmpl-test"}
+    end
+  end
+
+  describe "Tool calling" do
+    test "handles tool call start", %{server: server} do
+      line = tool_call_start_chunk("get_weather", ~s({"location), 0, "call_123")
+
+      StreamLineUtils.handle_stream_line(server, line)
+
+      assert_received {:ai_tool_call_start, "chatcmpl-test",
+                       {"get_weather", ~s({"location), 0, "call_123"}}
+    end
+
+    test "handles tool call middle", %{server: server} do
+      line = tool_call_middle_chunk(~s(": "San Francisco"))
+
+      StreamLineUtils.handle_stream_line(server, line)
+      assert_received {:ai_tool_call_middle, "chatcmpl-test", {~s(": "San Francisco"), 0}}
+    end
+
+    test "handles tool call end", %{server: server} do
+      line = tool_call_end_chunk()
+
+      StreamLineUtils.handle_stream_line(server, line)
+      assert_received {:ai_tool_call_end, "chatcmpl-test"}
+    end
+
+    test "handles complete tool call flow", %{server: server} do
+      # Start tool call
+      start_line = tool_call_start_chunk("get_weather", ~s({"location), 0, "call_123")
+      StreamLineUtils.handle_stream_line(server, start_line)
+
+      assert_received {:ai_tool_call_start, "chatcmpl-test",
+                       {"get_weather", ~s({"location), 0, "call_123"}}
+
+      # Middle of tool call
+      middle_line = tool_call_middle_chunk(~s(": "NYC"}))
+      StreamLineUtils.handle_stream_line(server, middle_line)
+      assert_received {:ai_tool_call_middle, "chatcmpl-test", {~s(": "NYC"}), 0}}
+
+      # End tool call
+      end_line = tool_call_end_chunk()
+      StreamLineUtils.handle_stream_line(server, end_line)
+      assert_received {:ai_tool_call_end, "chatcmpl-test"}
+    end
+  end
+
+  describe "Integration tests" do
+    test "handles complete conversation flow", %{server: server} do
+      # Start
+      StreamLineUtils.handle_stream_line(server, start_response())
+      assert_received {:start_new_ai_response, "chatcmpl-test"}
+
+      # Reasoning chunks
+      StreamLineUtils.handle_stream_line(server, reasoning_chunk("Think"))
+      assert_received {:ai_reasoning_chunk, "chatcmpl-test", "Think"}
+
+      StreamLineUtils.handle_stream_line(server, reasoning_chunk("ing..."))
+      assert_received {:ai_reasoning_chunk, "chatcmpl-test", "ing..."}
+
+      # Content chunks
+      StreamLineUtils.handle_stream_line(server, text_chunk("Hello"))
+      assert_received {:ai_text_chunk, "chatcmpl-test", "Hello"}
+
+      StreamLineUtils.handle_stream_line(server, text_chunk(" world"))
+      assert_received {:ai_text_chunk, "chatcmpl-test", " world"}
+
+      # End
+      StreamLineUtils.handle_stream_line(server, stop_chunk())
+      assert_received {:ai_text_stream_finish, "chatcmpl-test"}
+
+      # Done marker
+      StreamLineUtils.handle_stream_line(server, "data: [DONE]")
+      refute_received _
+    end
+
+    test "handles conversation with tool call", %{server: server} do
+      # Start
+      StreamLineUtils.handle_stream_line(server, start_response())
+      assert_received {:start_new_ai_response, "chatcmpl-test"}
+
+      # Reasoning
+      StreamLineUtils.handle_stream_line(server, reasoning_chunk("Need to check weather"))
+      assert_received {:ai_reasoning_chunk, "chatcmpl-test", "Need to check weather"}
+
+      # Tool call
+      StreamLineUtils.handle_stream_line(
+        server,
+        tool_call_start_chunk("get_weather", ~s({"loc), 0, "call_1")
+      )
+
+      assert_received {:ai_tool_call_start, "chatcmpl-test",
+                       {"get_weather", ~s({"loc), 0, "call_1"}}
+
+      StreamLineUtils.handle_stream_line(server, tool_call_middle_chunk(~s(ation"})))
+      assert_received {:ai_tool_call_middle, "chatcmpl-test", {~s(ation"}), 0}}
+
+      StreamLineUtils.handle_stream_line(server, tool_call_end_chunk())
+      assert_received {:ai_tool_call_end, "chatcmpl-test"}
+
+      # Done
+      StreamLineUtils.handle_stream_line(server, "data: [DONE]")
+      refute_received _
+    end
+  end
+end
diff --git a/test/support/stream_chunk_helpers.ex b/test/support/stream_chunk_helpers.ex
new file mode 100644
index 0000000..4f1cd0d
--- /dev/null
+++ b/test/support/stream_chunk_helpers.ex
@@ -0,0 +1,157 @@
+defmodule ElixirAi.StreamChunkHelpers do
+  @moduledoc """
+  Helper functions for creating SSE-formatted AI streaming response chunks for testing.
+  """
+
+  @doc """
+  Creates a response start chunk with assistant role.
+  """
+  def start_response(id \\ "chatcmpl-test") do
+    chunk = %{
+      "choices" => [
+        %{
+          "finish_reason" => nil,
+          "index" => 0,
+          "delta" => %{"role" => "assistant", "content" => nil}
+        }
+      ],
+      "created" => 1_773_426_536,
+      "id" => id,
+      "model" => "gpt-oss-120b-F16.gguf",
+      "object" => "chat.completion.chunk"
+    }
+
+    "data: #{Jason.encode!(chunk)}"
+  end
+
+  @doc """
+  Creates a reasoning content chunk.
+  """
+  def reasoning_chunk(content, id \\ "chatcmpl-test") do
+    chunk = %{
+      "choices" => [
+        %{"finish_reason" => nil, "index" => 0, "delta" => %{"reasoning_content" => content}}
+      ],
+      "created" => 1_773_426_536,
+      "id" => id,
+      "model" => "gpt-oss-120b-F16.gguf",
+      "object" => "chat.completion.chunk"
+    }
+
+    "data: #{Jason.encode!(chunk)}"
+  end
+
+  @doc """
+  Creates a text content chunk.
+  """
+  def text_chunk(content, id \\ "chatcmpl-test") do
+    chunk = %{
+      "choices" => [%{"finish_reason" => nil, "index" => 0, "delta" => %{"content" => content}}],
+      "created" => 1_773_426_537,
+      "id" => id,
+      "model" => "gpt-oss-120b-F16.gguf",
+      "object" => "chat.completion.chunk"
+    }
+
+    "data: #{Jason.encode!(chunk)}"
+  end
+
+  @doc """
+  Creates a stop/finish chunk.
+  """
+  def stop_chunk(id \\ "chatcmpl-test") do
+    chunk = %{
+      "choices" => [%{"finish_reason" => "stop", "index" => 0, "delta" => %{}}],
+      "created" => 1_773_426_537,
+      "id" => id,
+      "model" => "gpt-oss-120b-F16.gguf",
+      "object" => "chat.completion.chunk"
+    }
+
+    "data: #{Jason.encode!(chunk)}"
+  end
+
+  @doc """
+  Creates a tool call start chunk.
+  """
+  def tool_call_start_chunk(
+        tool_name,
+        arguments,
+        index \\ 0,
+        tool_call_id,
+        id \\ "chatcmpl-test"
+      ) do
+    chunk = %{
+      "choices" => [
+        %{
+          "finish_reason" => nil,
+          "index" => 0,
+          "delta" => %{
+            "tool_calls" => [
+              %{
+                "id" => tool_call_id,
+                "index" => index,
+                "type" => "function",
+                "function" => %{"name" => tool_name, "arguments" => arguments}
+              }
+            ]
+          }
+        }
+      ],
+      "created" => 1_773_426_537,
+      "id" => id,
+      "model" => "gpt-oss-120b-F16.gguf",
+      "object" => "chat.completion.chunk"
+    }
+
+    "data: #{Jason.encode!(chunk)}"
+  end
+
+  @doc """
+  Creates a tool call middle chunk (continuing arguments).
+  """
+  def tool_call_middle_chunk(arguments, index \\ 0, id \\ "chatcmpl-test") do
+    chunk = %{
+      "choices" => [
+        %{
+          "finish_reason" => nil,
+          "index" => 0,
+          "delta" => %{
+            "tool_calls" => [
+              %{"index" => index, "function" => %{"arguments" => arguments}}
+            ]
+          }
+        }
+      ],
+      "created" => 1_773_426_537,
+      "id" => id,
+      "model" => "gpt-oss-120b-F16.gguf",
+      "object" => "chat.completion.chunk"
+    }
+
+    "data: #{Jason.encode!(chunk)}"
+  end
+
+  @doc """
+  Creates a tool call end chunk.
+  """
+  def tool_call_end_chunk(id \\ "chatcmpl-test") do
+    chunk = %{
+      "choices" => [%{"finish_reason" => "tool_calls", "index" => 0, "delta" => %{}}],
+      "created" => 1_773_426_537,
+      "id" => id,
+      "model" => "gpt-oss-120b-F16.gguf",
+      "object" => "chat.completion.chunk"
+    }
+
+    "data: #{Jason.encode!(chunk)}"
+  end
+
+  @doc """
+  Creates an error chunk.
+  """
+  def error_chunk(message, type \\ "invalid_request_error") do
+    chunk = %{"error" => %{"message" => message, "type" => type}}
+    "data: #{Jason.encode!(chunk)}"
+  end
+end