feat: add OpenAI-compat adapter for Ollama and llama.cpp

Thin wrapper over OpenAI adapter with custom base URLs.
Ollama: localhost:11434/v1, llama.cpp: localhost:8080/v1.
No API key required for local providers.

Fixed: initial tool call args captured on first chunk
(Ollama sends complete args in one chunk, not as deltas).

Live verified: text + tool calling with qwen3:14b on Ollama.
Five providers now verified live: Mistral, Anthropic, OpenAI, Google, Ollama (llama.cpp support added but not yet live-tested).
This commit is contained in:
2026-04-03 13:47:30 +02:00
parent d26b07c509
commit 54ae24d11c
3 changed files with 54 additions and 6 deletions

View File

@@ -16,6 +16,7 @@ import (
"somegit.dev/Owlibou/gnoma/internal/provider/mistral"
googleprov "somegit.dev/Owlibou/gnoma/internal/provider/google"
oaiprov "somegit.dev/Owlibou/gnoma/internal/provider/openai"
"somegit.dev/Owlibou/gnoma/internal/provider/openaicompat"
"somegit.dev/Owlibou/gnoma/internal/stream"
"somegit.dev/Owlibou/gnoma/internal/tool"
"somegit.dev/Owlibou/gnoma/internal/tool/bash"
@@ -46,12 +47,13 @@ func main() {
}
logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: logLevel}))
// Resolve API key
// Resolve API key (local providers don't need one)
key := *apiKey
if key == "" {
key = resolveAPIKey(*providerName)
}
if key == "" {
localProviders := map[string]bool{"ollama": true, "llamacpp": true}
if key == "" && !localProviders[*providerName] {
fmt.Fprintf(os.Stderr, "error: no API key for provider %q\nSet %s environment variable or use --api-key\n",
*providerName, envKeyFor(*providerName))
os.Exit(1)
@@ -198,8 +200,12 @@ func createProvider(name, apiKey, model string) (provider.Provider, error) {
return oaiprov.New(cfg)
case "google":
return googleprov.New(cfg)
case "ollama":
return openaicompat.NewOllama(cfg)
case "llamacpp":
return openaicompat.NewLlamaCpp(cfg)
default:
return nil, fmt.Errorf("unknown provider %q (supports: mistral, anthropic, openai, google)", name)
return nil, fmt.Errorf("unknown provider %q (supports: mistral, anthropic, openai, google, ollama, llamacpp)", name)
}
}

View File

@@ -72,10 +72,11 @@ func (s *openaiStream) Next() bool {
for _, tc := range delta.ToolCalls {
existing, ok := s.toolCalls[tc.Index]
if !ok {
// New tool call
// New tool call — capture initial arguments too
existing = &toolCallState{
id: tc.ID,
name: tc.Function.Name,
args: tc.Function.Arguments,
}
s.toolCalls[tc.Index] = existing
s.hadToolCalls = true
@@ -90,8 +91,8 @@ func (s *openaiStream) Next() bool {
}
}
// Accumulate arguments
if tc.Function.Arguments != "" {
// Accumulate arguments (subsequent chunks)
if tc.Function.Arguments != "" && ok {
existing.args += tc.Function.Arguments
s.cur = stream.Event{
Type: stream.EventToolCallDelta,

View File

@@ -0,0 +1,41 @@
// Package openaicompat provides OpenAI-compatible provider adapters
// for Ollama, llama.cpp, and other servers that implement the OpenAI API.
package openaicompat
import (
"somegit.dev/Owlibou/gnoma/internal/provider"
oaiprov "somegit.dev/Owlibou/gnoma/internal/provider/openai"
)
// Default base URLs for the supported local OpenAI-compatible servers.
// Both expose the OpenAI REST API under the /v1 path prefix.
const (
ollamaDefaultURL = "http://localhost:11434/v1"
llamacppDefaultURL = "http://localhost:8080/v1"
)
// withDefaults returns cfg with any zero-valued fields (BaseURL, APIKey,
// Model) filled in from the given provider defaults. Explicitly set fields
// are left untouched, so callers can still override everything.
func withDefaults(cfg provider.ProviderConfig, baseURL, apiKey, model string) provider.ProviderConfig {
	if cfg.BaseURL == "" {
		cfg.BaseURL = baseURL
	}
	if cfg.APIKey == "" {
		cfg.APIKey = apiKey
	}
	if cfg.Model == "" {
		cfg.Model = model
	}
	return cfg
}

// NewOllama creates a provider for a local Ollama instance.
// Defaults: base URL http://localhost:11434/v1, model "qwen3:8b", and a
// placeholder API key ("ollama") since Ollama doesn't require a real key.
func NewOllama(cfg provider.ProviderConfig) (provider.Provider, error) {
	return oaiprov.New(withDefaults(cfg, ollamaDefaultURL, "ollama", "qwen3:8b"))
}

// NewLlamaCpp creates a provider for a local llama.cpp server.
// Defaults: base URL http://localhost:8080/v1, model "default", and a
// placeholder API key ("llamacpp") since llama.cpp doesn't require a real key.
func NewLlamaCpp(cfg provider.ProviderConfig) (provider.Provider, error) {
	return oaiprov.New(withDefaults(cfg, llamacppDefaultURL, "llamacpp", "default"))
}