Skip to content

Commit

Permalink
Merge pull request #302 from HazyResearch/flash-fill-max-tokens
Browse files · Browse the repository at this point in the history
add max tokens option to flash fill
  • (branch information loading indicator — page scrape residue)
ad12 authored Mar 12, 2023
2 parents 4ee7f36 + f0b30f9 commit f6fe43f
Showing 1 changed file with 4 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,10 @@ def __init__(
df: "DataFrame",
target_column: str,
manifest_cache_dir: str = "~/.cache/manifest",
max_tokens: int = 1,
):
self.max_tokens = max_tokens

df = df.view()
if target_column not in df.columns:
df[target_column] = ""
Expand Down Expand Up @@ -139,7 +142,7 @@ def run_manifest(
client_connection=os.getenv("OPENAI_API_KEY"),
engine=engine,
temperature=0,
max_tokens=1,
max_tokens=self.max_tokens,
cache_name="sqlite",
cache_connection=os.path.join(self.manifest_cache_dir, "cache.sqlite"),
)
Expand Down

0 comments on commit f6fe43f

Please sign in to comment.