76 changes: 73 additions & 3 deletions README.md
@@ -151,18 +151,88 @@ ensure the session is maintained across multiple calls.

### Embedding Generation

TODO
You can generate embedding vectors using an appropriate Ollama or Mistral model:

```go
import (
	"context"
	"fmt"

	"github.com/mutablelogic/go-llm"
)

func embedding(ctx context.Context, agent llm.Agent) error {
	// Generate an embedding vector for the prompt
	vector, err := agent.Model(ctx, "mistral-embed").Embedding(ctx, "hello")
	if err != nil {
		return err
	}
	fmt.Println(vector)
	return nil
}
```

### Attachments & Image Caption Generation

TODO
Some models have a `vision` capability, and others can summarize attached text. For example,
to generate a caption for an image:

```go
import (
	"context"
	"os"

	"github.com/mutablelogic/go-llm"
)

func generate_image_caption(ctx context.Context, agent llm.Agent, path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	// Describe an image
	r, err := agent.Model(ctx, "claude-3-5-sonnet-20241022").UserPrompt(
		ctx, "Provide a short caption for this image", llm.WithAttachment(f),
	)
	if err != nil {
		return "", err
	}

	// Return the caption text
	return r.Text(0), nil
}
```

Summarizing a text or PDF document works exactly the same way with an Anthropic model, just with a different prompt.
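
For example, a minimal sketch reusing the caption pattern above (the prompt wording and model
choice are illustrative):

```go
import (
	"context"
	"os"

	"github.com/mutablelogic/go-llm"
)

func summarize_document(ctx context.Context, agent llm.Agent, path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	// Summarize the attached document
	r, err := agent.Model(ctx, "claude-3-5-sonnet-20241022").UserPrompt(
		ctx, "Summarize this document in three sentences", llm.WithAttachment(f),
	)
	if err != nil {
		return "", err
	}
	return r.Text(0), nil
}
```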

### Streaming

TODO
Streaming is supported by all providers, but Ollama cannot use streaming and tools
simultaneously. You provide a callback function with the signature `func(llm.Completion)`,
which is called each time a fragment of the completion is received.

```go
import (
	"context"
	"fmt"

	"github.com/mutablelogic/go-llm"
)

func generate_completion(ctx context.Context, agent llm.Agent, prompt string) (string, error) {
	r, err := agent.Model(ctx, "claude-3-5-sonnet-20241022").UserPrompt(
		ctx, prompt,
		llm.WithStream(stream_callback),
	)
	if err != nil {
		return "", err
	}

	// Return the complete text once streaming has finished
	return r.Text(0), nil
}

func stream_callback(completion llm.Completion) {
	// Print the completion text received so far on each call
	fmt.Println(completion.Text(0))
}
```

### Tool Support

All providers support tools, but not all models do.

TODO
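
Until this section is written, here is a hedged sketch of what registering and calling a tool
might look like. The `tool.NewToolKit` constructor, the `Register` method, the
`llm.WithToolKit` option, and the `llm.Tool` method set shown here are assumptions about the
API, not confirmed by this repository:

```go
import (
	"context"

	"github.com/mutablelogic/go-llm"
	"github.com/mutablelogic/go-llm/pkg/tool"
)

// weather is a hypothetical tool; the method set below is an assumed
// shape for llm.Tool, not the confirmed interface.
type weather struct{}

func (weather) Name() string        { return "get_weather" }
func (weather) Description() string { return "Get the current weather for a city" }
func (weather) Run(ctx context.Context) (any, error) {
	return "sunny", nil // canned response for the sketch
}

func generate_with_tools(ctx context.Context, agent llm.Agent, prompt string) (string, error) {
	// Register the tool with a toolkit (assumed API)
	toolkit := tool.NewToolKit()
	if err := toolkit.Register(weather{}); err != nil {
		return "", err
	}

	// Pass the toolkit when prompting; the model may invoke the tool
	r, err := agent.Model(ctx, "claude-3-5-sonnet-20241022").UserPrompt(
		ctx, prompt, llm.WithToolKit(toolkit),
	)
	if err != nil {
		return "", err
	}
	return r.Text(0), nil
}
```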

## Options
8 changes: 3 additions & 5 deletions cmd/llm/main.go
@@ -107,11 +107,9 @@ func main() {
if cli.OllamaEndpoint != "" {
opts = append(opts, agent.WithOllama(cli.OllamaEndpoint, clientopts...))
}
/*
if cli.AnthropicKey != "" {
opts = append(opts, agent.WithAnthropic(cli.AnthropicKey, clientopts...))
}
*/
if cli.AnthropicKey != "" {
opts = append(opts, agent.WithAnthropic(cli.AnthropicKey, clientopts...))
}
if cli.MistralKey != "" {
opts = append(opts, agent.WithMistral(cli.MistralKey, clientopts...))
}
20 changes: 10 additions & 10 deletions pkg/agent/opt.go
@@ -4,6 +4,7 @@ import (
// Packages
client "github.com/mutablelogic/go-client"
llm "github.com/mutablelogic/go-llm"
"github.com/mutablelogic/go-llm/pkg/anthropic"
mistral "github.com/mutablelogic/go-llm/pkg/mistral"
ollama "github.com/mutablelogic/go-llm/pkg/ollama"
)
@@ -22,18 +23,17 @@ func WithOllama(endpoint string, opts ...client.ClientOpt) llm.Opt {
}
}

/*
func WithAnthropic(key string, opts ...client.ClientOpt) llm.Opt {
	return func(o *llm.Opts) error {
		client, err := anthropic.New(key, opts...)
		if err != nil {
			return err
		} else {
			return llm.WithAgent(client)(o)
		}
	}
}
*/
func WithAnthropic(key string, opts ...client.ClientOpt) llm.Opt {
	return func(o *llm.Opts) error {
		client, err := anthropic.New(key, opts...)
		if err != nil {
			return err
		} else {
			return llm.WithAgent(client)(o)
		}
	}
}

func WithMistral(key string, opts ...client.ClientOpt) llm.Opt {
return func(o *llm.Opts) error {
client, err := mistral.New(key, opts...)
40 changes: 36 additions & 4 deletions pkg/anthropic/client.go
@@ -5,6 +5,8 @@ package anthropic

import (
// Packages
"context"

client "github.com/mutablelogic/go-client"
llm "github.com/mutablelogic/go-llm"
)
@@ -42,10 +44,7 @@ func New(ApiKey string, opts ...client.ClientOpt) (*Client, error) {
}

// Return the client
return &Client{
Client: client,
cache: make(map[string]llm.Model),
}, nil
return &Client{client, nil}, nil
}

///////////////////////////////////////////////////////////////////////////////
@@ -55,3 +54,36 @@ func New(ApiKey string, opts ...client.ClientOpt) (*Client, error) {
func (*Client) Name() string {
return defaultName
}

// Return the models
func (anthropic *Client) Models(ctx context.Context) ([]llm.Model, error) {
// Cache models
if anthropic.cache == nil {
models, err := anthropic.ListModels(ctx)
if err != nil {
return nil, err
}
anthropic.cache = make(map[string]llm.Model, len(models))
for _, model := range models {
anthropic.cache[model.Name()] = model
}
}

// Return models
result := make([]llm.Model, 0, len(anthropic.cache))
for _, model := range anthropic.cache {
result = append(result, model)
}
return result, nil
}

// Return a model by name, or nil if not found.
// Panics on error.
func (anthropic *Client) Model(ctx context.Context, name string) llm.Model {
if anthropic.cache == nil {
if _, err := anthropic.Models(ctx); err != nil {
panic(err)
}
}
return anthropic.cache[name]
}
54 changes: 40 additions & 14 deletions pkg/anthropic/client_test.go
@@ -1,7 +1,10 @@
package anthropic_test

import (
"flag"
"log"
"os"
"strconv"
"testing"

// Packages
@@ -10,23 +13,46 @@ import (
assert "github.com/stretchr/testify/assert"
)

func Test_client_001(t *testing.T) {
assert := assert.New(t)
client, err := anthropic.New(GetApiKey(t), opts.OptTrace(os.Stderr, true))
if assert.NoError(err) {
assert.NotNil(client)
t.Log(client)
///////////////////////////////////////////////////////////////////////////////
// TEST SET-UP

var (
client *anthropic.Client
)

func TestMain(m *testing.M) {
var verbose bool

// Verbose output
flag.Parse()
if f := flag.Lookup("test.v"); f != nil {
if v, err := strconv.ParseBool(f.Value.String()); err == nil {
verbose = v
}
}

// API KEY
api_key := os.Getenv("ANTHROPIC_API_KEY")
if api_key == "" {
log.Print("ANTHROPIC_API_KEY not set")
os.Exit(0)
}

// Create client
var err error
client, err = anthropic.New(api_key, opts.OptTrace(os.Stderr, verbose))
if err != nil {
log.Println(err)
os.Exit(-1)
}
os.Exit(m.Run())
}

///////////////////////////////////////////////////////////////////////////////
// ENVIRONMENT

func GetApiKey(t *testing.T) string {
	key := os.Getenv("ANTHROPIC_API_KEY")
	if key == "" {
		t.Skip("ANTHROPIC_API_KEY not set, skipping tests")
		t.SkipNow()
	}
	return key
}

// TESTS

func Test_client_001(t *testing.T) {
	assert := assert.New(t)
	assert.NotNil(client)
	t.Log(client)
}