From d9c3a1df036301b542cc87236646b17f67351cbc Mon Sep 17 00:00:00 2001 From: Marcel Claus Date: Fri, 12 May 2023 10:06:05 +0200 Subject: [PATCH] better package desc for npm --- README.md | 13 +++++++------ package.json | 12 +++++++++--- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 6ac119d..b6ba449 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,14 @@ ![Banner image](https://user-images.githubusercontent.com/10284570/173569848-c624317f-42b1-45a6-ab09-f0ea3c247648.png) -# n8n-nodes-gpt-tokenizer +# Work with BPE Tokens in n8n with the GPT-Tokenizer Node -# Work with BPE Tokens in n8n - -This community package contains a node to work with BPE Tokens such as OpenAI's GPT models use under the hood. -As a matter of fact this node works just fine with the OpenAI Node. +This community package contains a node to work with BPE Tokens such as those OpenAI's GPT models use under the hood. As a matter of fact, this node works just fine with the OpenAI Node. You can: * Encode a string into BPE Tokens (may be cool for custom training) * Decode an array of BPE Tokens back to a string (for funzies?) * Determine a strings token length before submitting to the OpenAI API +* Calculate costs before submitting to the OpenAI API * Split a text into chunks which match exactly a definable Token Limit [n8n](https://n8n.io/) is a [fair-code licensed](https://docs.n8n.io/reference/license/) workflow automation platform. @@ -27,7 +25,7 @@ You can: | ------------- | ------------- | ------------- | | Encode | Encode a string into BPE Tokens. Returns an array of Tokens. | - | | Decode | Decode an array of BPE Tokens into a string. Returns a string. | - | -| Count Tokens | Count the tokens a string produces. Return a the number of tokens. | - | +| Count Tokens | Count the tokens a string produces. Returns the number of tokens. | - | | Check Token Limit | Wheather a given string exceeds a defined Token Limit. Returns a boolean. 
| Optional: throw an error if the Token Limit is exceeded. | | Slice to Max Token Limit | Slice the string into block which match exactly the provided token limit. Returns an array of strings. | - | @@ -48,5 +46,8 @@ I hope you are enyoing these nodes. If you are in need of a smooth automation, s ## Version History +### 0.1.1 +- just polishing the npm release + ### 0.1.0 - initial release diff --git a/package.json b/package.json index 0df91bd..3bdcc8e 100644 --- a/package.json +++ b/package.json @@ -1,9 +1,15 @@ { "name": "n8n-nodes-gpt-tokenizer", - "version": "0.1.0", - "description": "A n8n node to Tokenize the Input into byte pair sequence of integers in similar fashion as GPT models of OpenAI does", + "version": "0.1.1", + "description": "An n8n node for working with BPE Tokens, with OpenAI's GPT models in mind.", "keywords": [ - "n8n-community-node-package" + "n8n-community-node-package", + "n8n", + "nodemation", + "n8n-node", + "gpt", + "bpe", + "tokenizer" ], "license": "MIT", "homepage": "",