diff --git a/cmd/gnoma/main.go b/cmd/gnoma/main.go
index 9723f41..ce10268 100644
--- a/cmd/gnoma/main.go
+++ b/cmd/gnoma/main.go
@@ -16,6 +16,7 @@ import (
 	"somegit.dev/Owlibou/gnoma/internal/provider/mistral"
 	googleprov "somegit.dev/Owlibou/gnoma/internal/provider/google"
 	oaiprov "somegit.dev/Owlibou/gnoma/internal/provider/openai"
+	"somegit.dev/Owlibou/gnoma/internal/provider/openaicompat"
 	"somegit.dev/Owlibou/gnoma/internal/stream"
 	"somegit.dev/Owlibou/gnoma/internal/tool"
 	"somegit.dev/Owlibou/gnoma/internal/tool/bash"
@@ -46,12 +47,13 @@ func main() {
 	}
 	logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: logLevel}))
 
-	// Resolve API key
+	// Resolve API key (local providers don't need one)
 	key := *apiKey
 	if key == "" {
 		key = resolveAPIKey(*providerName)
 	}
-	if key == "" {
+	localProviders := map[string]bool{"ollama": true, "llamacpp": true}
+	if key == "" && !localProviders[*providerName] {
 		fmt.Fprintf(os.Stderr, "error: no API key for provider %q\nSet %s environment variable or use --api-key\n",
 			*providerName, envKeyFor(*providerName))
 		os.Exit(1)
@@ -198,8 +200,12 @@ func createProvider(name, apiKey, model string) (provider.Provider, error) {
 		return oaiprov.New(cfg)
 	case "google":
 		return googleprov.New(cfg)
+	case "ollama":
+		return openaicompat.NewOllama(cfg)
+	case "llamacpp":
+		return openaicompat.NewLlamaCpp(cfg)
 	default:
-		return nil, fmt.Errorf("unknown provider %q (supports: mistral, anthropic, openai, google)", name)
+		return nil, fmt.Errorf("unknown provider %q (supports: mistral, anthropic, openai, google, ollama, llamacpp)", name)
 	}
 }
 
diff --git a/internal/provider/openai/stream.go b/internal/provider/openai/stream.go
index 7d2b565..578f507 100644
--- a/internal/provider/openai/stream.go
+++ b/internal/provider/openai/stream.go
@@ -72,10 +72,11 @@ func (s *openaiStream) Next() bool {
 		for _, tc := range delta.ToolCalls {
 			existing, ok := s.toolCalls[tc.Index]
 			if !ok {
-				// New tool call
+				// New tool call — capture initial arguments too
 				existing = &toolCallState{
 					id:   tc.ID,
 					name: tc.Function.Name,
+					args: tc.Function.Arguments,
 				}
 				s.toolCalls[tc.Index] = existing
 				s.hadToolCalls = true
@@ -90,8 +91,8 @@ func (s *openaiStream) Next() bool {
 				}
 			}
 
-			// Accumulate arguments
-			if tc.Function.Arguments != "" {
+			// Accumulate arguments (subsequent chunks)
+			if tc.Function.Arguments != "" && ok {
 				existing.args += tc.Function.Arguments
 				s.cur = stream.Event{
 					Type: stream.EventToolCallDelta,
diff --git a/internal/provider/openaicompat/provider.go b/internal/provider/openaicompat/provider.go
new file mode 100644
index 0000000..627d1d1
--- /dev/null
+++ b/internal/provider/openaicompat/provider.go
@@ -0,0 +1,41 @@
+// Package openaicompat provides OpenAI-compatible provider adapters
+// for Ollama, llama.cpp, and other servers that implement the OpenAI API.
+package openaicompat
+
+import (
+	"somegit.dev/Owlibou/gnoma/internal/provider"
+	oaiprov "somegit.dev/Owlibou/gnoma/internal/provider/openai"
+)
+
+const (
+	ollamaDefaultURL   = "http://localhost:11434/v1"
+	llamacppDefaultURL = "http://localhost:8080/v1"
+)
+
+// NewOllama creates a provider for a local Ollama instance.
+func NewOllama(cfg provider.ProviderConfig) (provider.Provider, error) {
+	if cfg.BaseURL == "" {
+		cfg.BaseURL = ollamaDefaultURL
+	}
+	if cfg.APIKey == "" {
+		cfg.APIKey = "ollama" // Ollama doesn't require a real key
+	}
+	if cfg.Model == "" {
+		cfg.Model = "qwen3:8b"
+	}
+	return oaiprov.New(cfg)
+}
+
+// NewLlamaCpp creates a provider for a local llama.cpp server.
+func NewLlamaCpp(cfg provider.ProviderConfig) (provider.Provider, error) {
+	if cfg.BaseURL == "" {
+		cfg.BaseURL = llamacppDefaultURL
+	}
+	if cfg.APIKey == "" {
+		cfg.APIKey = "llamacpp" // llama.cpp doesn't require a real key
+	}
+	if cfg.Model == "" {
+		cfg.Model = "default"
+	}
+	return oaiprov.New(cfg)
+}