From ef86bd08e816854a99fe2a0ae36bf2dfa0119357 Mon Sep 17 00:00:00 2001
From: Eli Bendersky
Date: Mon, 29 Jan 2024 16:57:06 -0800
Subject: [PATCH] readme: update example with non-deprecated call (#588)

---
 README.md                               | 32 +++++++++++++------------
 examples/openai-readme/go.mod           | 11 +++++++++
 examples/openai-readme/go.sum           | 16 +++++++++++++
 examples/openai-readme/openai-readme.go | 24 +++++++++++++++++++
 4 files changed, 68 insertions(+), 15 deletions(-)
 create mode 100644 examples/openai-readme/go.mod
 create mode 100644 examples/openai-readme/go.sum
 create mode 100644 examples/openai-readme/openai-readme.go

diff --git a/README.md b/README.md
index 2246f46de..b430767db 100644
--- a/README.md
+++ b/README.md
@@ -26,30 +26,32 @@ See [./examples](./examples) for example usage.
 package main
 
 import (
-	"context"
-	"log"
+	"context"
+	"fmt"
+	"log"
 
-	"github.com/tmc/langchaingo/llms/openai"
+	"github.com/tmc/langchaingo/llms"
+	"github.com/tmc/langchaingo/llms/openai"
 )
 
 func main() {
-	llm, err := openai.New()
-	if err != nil {
-		log.Fatal(err)
-	}
-	prompt := "What would be a good company name for a company that makes colorful socks?"
-	completion, err := llm.Call(context.Background(), prompt)
-	if err != nil {
-		log.Fatal(err)
-	}
-	log.Println(completion)
+	ctx := context.Background()
+	llm, err := openai.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+	prompt := "What would be a good company name for a company that makes colorful socks?"
+	completion, err := llms.GenerateFromSinglePrompt(ctx, llm, prompt)
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Println(completion)
 }
 ```
 
 ```shell
 $ go run .
-
-Socktastic!
+Socktastic
 ```
 
 # Resources
diff --git a/examples/openai-readme/go.mod b/examples/openai-readme/go.mod
new file mode 100644
index 000000000..f4ad5a98f
--- /dev/null
+++ b/examples/openai-readme/go.mod
@@ -0,0 +1,11 @@
+module github.com/tmc/langchaingo/examples/openai-readme
+
+go 1.21
+
+require github.com/tmc/langchaingo v0.1.4-alpha.0
+
+require (
+	github.com/dlclark/regexp2 v1.8.1 // indirect
+	github.com/google/uuid v1.4.0 // indirect
+	github.com/pkoukk/tiktoken-go v0.1.2 // indirect
+)
diff --git a/examples/openai-readme/go.sum b/examples/openai-readme/go.sum
new file mode 100644
index 000000000..6a9f883f3
--- /dev/null
+++ b/examples/openai-readme/go.sum
@@ -0,0 +1,16 @@
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dlclark/regexp2 v1.8.1 h1:6Lcdwya6GjPUNsBct8Lg/yRPwMhABj269AAzdGSiR+0=
+github.com/dlclark/regexp2 v1.8.1/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
+github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/pkoukk/tiktoken-go v0.1.2 h1:u7PCSBiWJ3nJYoTGShyM9iHXz4dNyYkurwwp+GHtyHY=
+github.com/pkoukk/tiktoken-go v0.1.2/go.mod h1:boMWvk9pQCOTx11pgu0DrIdrAKgQzzJKUP6vLXaz7Rw=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/tmc/langchaingo v0.1.4-alpha.0 h1:WVu2KgHr9wloHAiMbbeytiv2W76vpkA59uUwf0EUgrQ=
+github.com/tmc/langchaingo v0.1.4-alpha.0/go.mod h1:PKtJMXizDxJnT86q7lsVsyzTJqd0P2QKF7wt2jF6Lxk=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/examples/openai-readme/openai-readme.go b/examples/openai-readme/openai-readme.go
new file mode 100644
index 000000000..6383b26b1
--- /dev/null
+++ b/examples/openai-readme/openai-readme.go
@@ -0,0 +1,24 @@
+package main
+
+import (
+	"context"
+	"fmt"
+	"log"
+
+	"github.com/tmc/langchaingo/llms"
+	"github.com/tmc/langchaingo/llms/openai"
+)
+
+func main() {
+	ctx := context.Background()
+	llm, err := openai.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+	prompt := "What would be a good company name for a company that makes colorful socks?"
+	completion, err := llms.GenerateFromSinglePrompt(ctx, llm, prompt)
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Println(completion)
+}