Refactor: split AI chat utilities into ChatUtils and StreamLineUtils modules
Some checks failed
CI/CD Pipeline / build (push) Failing after 8s

This commit is contained in:
2026-03-25 12:05:56 -06:00
parent d857e91241
commit 0041c25f19
32 changed files with 139 additions and 78 deletions

View File

@@ -0,0 +1,128 @@
defmodule ElixirAi.ChatUtils do
  @moduledoc """
  Helpers for declaring AI tools and issuing streaming chat-completion
  requests against an OpenAI-compatible completions endpoint.
  """

  require Logger
  import ElixirAi.AiUtils.StreamLineUtils

  @doc """
  Builds a tool descriptor for use with `request_ai_response/5`.

  Expects a keyword list with exactly these keys, in this order:

    * `:name` - tool name exposed to the model
    * `:description` - description sent to the model
    * `:function` - 1-arity function invoked with the tool arguments
    * `:parameters` - JSON-schema map describing the arguments, e.g.
      `%{"type" => "object", "properties" => %{"name" => %{"type" => "string"}}, "required" => ["name"]}`
    * `:server` - process that receives `{:stream, ...}` messages

  Returns `%{name: name, definition: schema, run_function: fun}` where
  `fun.(current_message_id, tool_call_id, args)` runs `function.(args)` in a
  linked `Task` and sends the result back to `server` as
  `{:stream, {:tool_response, current_message_id, tool_call_id, result}}`
  (with `result = {:error, reason}` if the tool raised).
  """
  def ai_tool(
        name: name,
        description: description,
        function: function,
        parameters: parameters,
        server: server
      ) do
    schema = %{
      "type" => "function",
      "function" => %{
        "name" => name,
        "description" => description,
        "parameters" => parameters
      }
    }

    run_function = fn current_message_id, tool_call_id, args ->
      Task.start_link(fn ->
        try do
          result = function.(args)
          send(server, {:stream, {:tool_response, current_message_id, tool_call_id, result}})
        rescue
          # Report tool crashes back to the server as a tool_response instead
          # of only taking the linked task down with an unexplained exit.
          e ->
            reason = Exception.format(:error, e, __STACKTRACE__)
            Logger.error("Tool task crashed: #{reason}")

            send(
              server,
              {:stream, {:tool_response, current_message_id, tool_call_id, {:error, reason}}}
            )
        end
      end)
    end

    %{
      name: name,
      definition: schema,
      run_function: run_function
    }
  end

  @doc """
  Starts a linked task that POSTs a streaming chat-completion request.

  `provider` must expose `completions_url`, `api_token` and `model_name`.
  Each streamed response line is forwarded to `server` via
  `ElixirAi.AiUtils.StreamLineUtils.handle_stream_line/2`; request failures
  are sent to `server` as `{:stream, {:ai_request_error, reason}}`.
  """
  def request_ai_response(server, messages, tools, provider, tool_choice \\ "auto") do
    Task.start_link(fn ->
      api_url = provider.completions_url
      api_key = provider.api_token
      model = provider.model_name

      # Warn early on misconfiguration; the request itself will still fail
      # and be reported through the :ai_request_error path below.
      if is_nil(api_url) or api_url == "" do
        Logger.warning("AI endpoint is empty or nil")
      end

      if is_nil(api_key) or api_key == "" do
        Logger.warning("AI token is empty or nil")
      end

      if is_nil(model) or model == "" do
        Logger.warning("AI model is empty or nil")
      end

      body = %{
        model: model,
        stream: true,
        messages: messages |> Enum.map(&api_message/1),
        tools: Enum.map(tools, & &1.definition),
        tool_choice: tool_choice
      }

      headers = [{"authorization", "Bearer #{api_key}"}]

      case Req.post(api_url,
             json: body,
             headers: headers,
             into: fn {:data, data}, acc ->
               data
               |> String.split("\n")
               # Strip a trailing "\r" so CRLF-delimited SSE streams (which
               # yield lines like "data: [DONE]\r") still match the
               # handle_stream_line clauses.
               |> Enum.each(&handle_stream_line(server, String.trim_trailing(&1, "\r")))

               {:cont, acc}
             end
           ) do
        {:ok, _response} ->
          :ok

        {:error, reason} ->
          Logger.warning("AI request failed: #{inspect(reason)} for #{api_url}")
          send(server, {:stream, {:ai_request_error, reason}})
      end
    end)
  end

  @doc """
  Converts an internal message map into the OpenAI wire format.

  Assistant messages carrying tool calls, tool-result messages, and plain
  role/content messages each have their own clause.
  """
  def api_message(%{role: :assistant, tool_calls: [_ | _] = tool_calls} = msg) do
    %{
      role: "assistant",
      # Defaults to "" when the message has no :content key.
      content: Map.get(msg, :content, ""),
      tool_calls:
        Enum.map(tool_calls, fn call ->
          %{
            id: call.id,
            type: "function",
            function: %{
              name: call.name,
              arguments: call.arguments
            }
          }
        end)
    }
  end

  def api_message(%{role: :tool, tool_call_id: tool_call_id, content: content}) do
    %{role: "tool", tool_call_id: tool_call_id, content: content}
  end

  def api_message(%{role: role, content: content}) do
    %{role: Atom.to_string(role), content: content}
  end
end

View File

@@ -0,0 +1,158 @@
defmodule ElixirAi.AiUtils.StreamLineUtils do
  @moduledoc """
  Decodes streamed Server-Sent-Event lines from an OpenAI-compatible
  completions API and forwards recognised chunks to a listening server
  process as `{:stream, event}` messages.

  Clause order is significant: the specific map patterns must stay above
  the generic binary and catch-all clauses.
  """

  require Logger

  @doc """
  Handles a single raw stream line (binary) or an already-decoded chunk map.

  Binary input has its `"data: "` SSE prefix stripped and is JSON-decoded,
  then re-dispatched as a map. Recognised chunk maps are forwarded to
  `server`; anything else is logged and dropped. Returns `:ok` or the
  message that was sent.
  """
  # Blank keep-alive line between SSE events.
  def handle_stream_line(_server, "") do
    :ok
  end

  # End-of-stream sentinel; completion is already signalled by the
  # "finish_reason" chunks, so nothing is forwarded here.
  def handle_stream_line(_server, "data: [DONE]") do
    # send(server, :ai_stream_done)
    :ok
  end

  def handle_stream_line(server, "data: " <> json) do
    # Strip a trailing "\r" so CRLF-delimited SSE streams (yielding lines
    # such as "data: [DONE]\r") behave the same as LF-delimited ones.
    case String.trim_trailing(json, "\r") do
      "[DONE]" ->
        :ok

      payload ->
        case Jason.decode(payload) do
          {:ok, body} ->
            # Logger.debug("Received AI chunk: #{inspect(body)}")
            handle_stream_line(server, body)

          other ->
            Logger.error("Failed to decode AI response chunk: #{inspect(other)}")
            :ok
        end
    end
  end

  # First streamed chunk of a new assistant response.
  def handle_stream_line(server, %{
        "choices" => [%{"delta" => %{"content" => nil, "role" => "assistant"}}],
        "id" => id
      }) do
    send(
      server,
      {:stream, {:start_new_ai_response, id}}
    )
  end

  # Final chunk of a plain-text response.
  def handle_stream_line(
        server,
        %{
          "choices" => [%{"finish_reason" => "stop"}],
          "id" => id
        } = _msg
      ) do
    send(
      server,
      {:stream, {:ai_text_stream_finish, id}}
    )
  end

  # Incremental reasoning ("thinking") content.
  def handle_stream_line(server, %{
        "choices" => [
          %{
            "delta" => %{"reasoning_content" => reasoning_content},
            "finish_reason" => nil
          }
        ],
        "id" => id
      }) do
    send(
      server,
      {:stream, {:ai_reasoning_chunk, id, reasoning_content}}
    )
  end

  # Incremental response text.
  def handle_stream_line(server, %{
        "choices" => [
          %{
            "delta" => %{"content" => content},
            "finish_reason" => nil
          }
        ],
        "id" => id
      }) do
    send(
      server,
      {:stream, {:ai_text_chunk, id, content}}
    )
  end

  # Start and continuation fragments of streamed tool calls.
  def handle_stream_line(server, %{
        "choices" => [
          %{
            "delta" => %{
              "tool_calls" => tool_calls
            },
            "finish_reason" => nil
          }
        ],
        "id" => id
      })
      when is_list(tool_calls) do
    Enum.each(tool_calls, fn
      # A fragment carrying id/type/name opens a new tool call.
      %{
        "id" => tool_call_id,
        "index" => tool_index,
        "type" => "function",
        "function" => %{"name" => tool_name, "arguments" => tool_args_start}
      } ->
        send(
          server,
          {:stream,
           {:ai_tool_call_start, id, {tool_name, tool_args_start, tool_index, tool_call_id}}}
        )

      # Later fragments only append to the arguments of the call at `index`.
      %{"index" => tool_index, "function" => %{"arguments" => tool_args_diff}} ->
        send(server, {:stream, {:ai_tool_call_middle, id, {tool_args_diff, tool_index}}})

      other ->
        Logger.warning("Unmatched tool call item: #{inspect(other)}")
    end)
  end

  # Final chunk of a tool-call response.
  def handle_stream_line(
        server,
        %{
          "choices" => [%{"finish_reason" => "tool_calls"}],
          "id" => id
        }
      ) do
    send(server, {:stream, {:ai_tool_call_end, id}})
  end

  # Error payload embedded in the stream body; logged but not forwarded.
  def handle_stream_line(_server, %{"error" => error_info}) do
    Logger.error("Received error from AI stream: #{inspect(error_info)}")
    :ok
  end

  # Plain-text proxy errors arrive without the SSE "data: " prefix.
  def handle_stream_line(server, "proxy error" <> _ = error) when is_binary(error) do
    Logger.error("Proxy error in AI stream: #{error}")
    send(server, {:stream, {:ai_request_error, error}})
  end

  # Bare JSON lines without the "data: " prefix: try to decode and re-dispatch.
  def handle_stream_line(server, json) when is_binary(json) do
    case Jason.decode(json) do
      {:ok, body} ->
        handle_stream_line(server, body)

      _ ->
        Logger.warning("Received unmatched stream line: #{inspect(json)}")
        :ok
    end
  end

  # Catch-all: log and drop anything unrecognised.
  def handle_stream_line(_server, unmatched_message) do
    Logger.warning("Received unmatched stream line: #{inspect(unmatched_message)}")
    :ok
  end
end