ollama

package module
v0.30.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 8, 2024 License: MIT Imports: 9 Imported by: 0

Documentation

Index

Examples

Constants

View Source
const DefaultOllamaImage = "ollama/ollama:0.1.25"

Variables

This section is empty.

Functions

This section is empty.

Types

type OllamaContainer

// OllamaContainer represents the Ollama container type used in the module.
// It embeds testcontainers.Container, so all generic container operations
// (Exec, State, Terminate, ...) are available on it directly.
type OllamaContainer struct {
	testcontainers.Container
}

OllamaContainer represents the Ollama container type used in the module

func RunContainer

func RunContainer(ctx context.Context, opts ...testcontainers.ContainerCustomizer) (*OllamaContainer, error)

RunContainer creates an instance of the Ollama container type

Example
// runOllamaContainer {
ctx := context.Background()

// Start an Ollama container, pinning the image version for reproducibility.
ollamaContainer, err := tcollama.RunContainer(ctx, testcontainers.WithImage("ollama/ollama:0.1.25"))
if err != nil {
	log.Fatalf("failed to start container: %s", err)
}

// Clean up the container
// NOTE(review): log.Fatalf skips deferred calls, so this only runs on the
// success path — hence the gocritic suppressions below.
defer func() {
	if err := ollamaContainer.Terminate(ctx); err != nil {
		log.Fatalf("failed to terminate container: %s", err) // nolint:gocritic
	}
}()
// }

// Ask the container runtime for the current state to confirm it is running.
state, err := ollamaContainer.State(ctx)
if err != nil {
	log.Fatalf("failed to get container state: %s", err) // nolint:gocritic
}

fmt.Println(state.Running)
Output:

true
Example (WithModel_llama2_http)
// withHTTPModelLlama2 {
ctx := context.Background()

// Start an Ollama container, pinning the image version for reproducibility.
ollamaContainer, err := tcollama.RunContainer(
	ctx,
	testcontainers.WithImage("ollama/ollama:0.1.25"),
)
if err != nil {
	log.Fatalf("failed to start container: %s", err)
}
defer func() {
	if err := ollamaContainer.Terminate(ctx); err != nil {
		log.Fatalf("failed to terminate container: %s", err) // nolint:gocritic
	}
}()

model := "llama2"

// Pull the model into the container before serving it.
_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "pull", model})
if err != nil {
	log.Fatalf("failed to pull model %s: %s", model, err)
}

_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model})
if err != nil {
	log.Fatalf("failed to run model %s: %s", model, err)
}

// Host-mapped endpoint of the Ollama HTTP API inside the container.
connectionStr, err := ollamaContainer.ConnectionString(ctx)
if err != nil {
	log.Fatalf("failed to get connection string: %s", err) // nolint:gocritic
}

httpClient := &http.Client{}

// generate a response
payload := `{
	"model": "llama2",
	"prompt":"Why is the sky blue?"
}`

// Tie the request to ctx so it is cancelled together with the example's context.
req, err := http.NewRequestWithContext(ctx, http.MethodPost, fmt.Sprintf("%s/api/generate", connectionStr), strings.NewReader(payload))
if err != nil {
	log.Fatalf("failed to create request: %s", err) // nolint:gocritic
}

resp, err := httpClient.Do(req)
if err != nil {
	log.Fatalf("failed to get response: %s", err) // nolint:gocritic
}
// Close the body so the transport can reuse the underlying connection.
defer resp.Body.Close()
// }

fmt.Println(resp.StatusCode)

// Intentionally not asserting the output, as we don't want to run this example in the tests.
Output:

Example (WithModel_llama2_langchain)
// withLangchainModelLlama2 {
ctx := context.Background()

// Start an Ollama container, pinning the image version for reproducibility.
ollamaContainer, err := tcollama.RunContainer(
	ctx,
	testcontainers.WithImage("ollama/ollama:0.1.25"),
)
if err != nil {
	log.Fatalf("failed to start container: %s", err)
}
defer func() {
	if err := ollamaContainer.Terminate(ctx); err != nil {
		log.Fatalf("failed to terminate container: %s", err) // nolint:gocritic
	}
}()

model := "llama2"

// Pull the model into the container before serving it.
_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "pull", model})
if err != nil {
	log.Fatalf("failed to pull model %s: %s", model, err)
}

_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model})
if err != nil {
	log.Fatalf("failed to run model %s: %s", model, err)
}

// Host-mapped endpoint of the Ollama HTTP API inside the container.
connectionStr, err := ollamaContainer.ConnectionString(ctx)
if err != nil {
	log.Fatalf("failed to get connection string: %s", err) // nolint:gocritic
}

// Point a langchaingo Ollama client at the container.
var llm *langchainollama.LLM
if llm, err = langchainollama.New(
	langchainollama.WithModel(model),
	langchainollama.WithServerURL(connectionStr),
); err != nil {
	log.Fatalf("failed to create langchain ollama: %s", err) // nolint:gocritic
}

completion, err := llm.Call(
	context.Background(),
	"how can Testcontainers help with testing?",
	llms.WithSeed(42),         // fixing the seed makes the completion reproducible
	llms.WithTemperature(0.0), // the lower the temperature, the more deterministic (less creative) the completion
)
if err != nil {
	log.Fatalf("failed to get completion from langchain ollama: %s", err) // nolint:gocritic
}

// Print true once per expected keyword found in the completion.
words := []string{
	"easy", "isolation", "consistency",
}
lwCompletion := strings.ToLower(completion)

for _, word := range words {
	if strings.Contains(lwCompletion, word) {
		fmt.Println(true)
	}
}

// }

// Intentionally not asserting the output, as we don't want to run this example in the tests.
Output:

func (*OllamaContainer) Commit

func (c *OllamaContainer) Commit(ctx context.Context, targetImage string) error

Commit commits the current file system changes in the container into a new target image. The target image name should be unique, as this method will commit the current state of the container into a new image with the given name, so it doesn't override existing images. It should be used for creating an image that contains a loaded model.

func (*OllamaContainer) ConnectionString

func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error)

ConnectionString returns the connection string for the Ollama container, using the default port 11434.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL