-
-
Notifications
You must be signed in to change notification settings - Fork 693
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
docs: re-add llamafile example (#693)
docs: Llamafile example
- Loading branch information
1 parent
09ac6e0
commit d161fc2
Showing
4 changed files
with
79 additions
and
0 deletions.
There are no files selected for viewing
17 changes: 17 additions & 0 deletions
17
docs/docs/modules/model_io/models/llms/Integrations/llamafile.mdx
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
--- | ||
sidebar_label: Llamafile | ||
--- | ||
import CodeBlock from "@theme/CodeBlock"; | ||
import ExampleLlamafile from "@examples/llamafile-completion-example/llamafile_completion_example.go"; | ||
|
||
# Llamafile | ||
|
||
## Running the Server | ||
First, you need to have a server running. | ||
|
||
```sh | ||
./mistral-7b-instruct-v0.2.Q3_K_L.llamafile --server --nobrowser --embedding | ||
``` | ||
|
||
|
||
<CodeBlock language="go">{ExampleLlamafile}</CodeBlock> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,11 @@ | ||
module github.com/tmc/langchaingo/examples/llamafile-completion-example | ||
|
||
go 1.20 | ||
|
||
require github.com/tmc/langchaingo v0.1.6 | ||
|
||
require ( | ||
github.com/dlclark/regexp2 v1.10.0 // indirect | ||
github.com/google/uuid v1.6.0 // indirect | ||
github.com/pkoukk/tiktoken-go v0.1.6 // indirect | ||
) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0= | ||
github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= | ||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= | ||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||
github.com/pkoukk/tiktoken-go v0.1.6 h1:JF0TlJzhTbrI30wCvFuiw6FzP2+/bR+FIxUdgEAcUsw= | ||
github.com/pkoukk/tiktoken-go v0.1.6/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg= | ||
github.com/tmc/langchaingo v0.1.6 h1:n7sce0/ougio+wbYCQ84IX1tLuaUC+mKO6WKttPVRWk= | ||
github.com/tmc/langchaingo v0.1.6/go.mod h1:lPpWPoAud+yQowJNRZhdtRbQCSHKF+jRxd0gU58GDHU= |
43 changes: 43 additions & 0 deletions
43
examples/llamafile-completion-example/llamafile_completion_example.go
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
package main | ||
|
||
import ( | ||
"context" | ||
"fmt" | ||
|
||
"github.com/tmc/langchaingo/llms" | ||
"github.com/tmc/langchaingo/llms/llamafile" | ||
"github.com/tmc/langchaingo/schema" | ||
) | ||
|
||
func main() { | ||
|
||
options := []llamafile.Option{ | ||
llamafile.WithEmbeddingSize(2048), | ||
llamafile.WithTemperature(0.8), | ||
} | ||
llm, err := llamafile.New(options...) | ||
|
||
if err != nil { | ||
panic(err) | ||
} | ||
|
||
parts := []llms.ContentPart{ | ||
llms.TextContent{Text: "Brazil is a country? answer yes or no"}, | ||
} | ||
content := []llms.MessageContent{ | ||
{ | ||
Role: schema.ChatMessageTypeHuman, | ||
Parts: parts, | ||
}, | ||
} | ||
|
||
_, err = llm.GenerateContent(context.Background(), content, | ||
llms.WithStreamingFunc(func(ctx context.Context, chunk []byte) error { | ||
fmt.Print(string(chunk)) | ||
return nil | ||
})) | ||
|
||
if err != nil { | ||
panic(err) | ||
} | ||
} |