Skip to content

Commit

Permalink
refactor(models, dependencies): remove unused max_tokens variable and update dependency versions and Python compatibility in pdm.lock
Browse files Browse the repository at this point in the history
  • Loading branch information
yufeikang committed Dec 4, 2024
1 parent 23c2b30 commit 7d08ad0
Show file tree
Hide file tree
Showing 2 changed files with 34 additions and 15 deletions.
5 changes: 0 additions & 5 deletions app/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@

logger = logging.getLogger(__name__)

MAX_TOKENS = os.environ.get("MAX_TOKENS", 1024)
DEFAULT_MODEL = os.environ.get("DEFAULT_MODEL")


Expand Down Expand Up @@ -282,7 +281,6 @@ async def __chat(self, messages, model, temperature, **kwargs):
res = await self.openai_client.chat.completions.create(
model=model,
messages=messages,
max_tokens=MAX_TOKENS,
n=1,
temperature=temperature,
# stream=stream,
Expand Down Expand Up @@ -433,7 +431,6 @@ def __generate_content(
stream=stream,
generation_config=genai.types.GenerationConfig(
candidate_count=1,
max_output_tokens=MAX_TOKENS,
temperature=temperature,
),
)
Expand Down Expand Up @@ -486,7 +483,6 @@ async def chat_completions(self, raycast_data: dict):
response = await self.anthropic_client.messages.create(
model=model,
messages=messages,
max_tokens=MAX_TOKENS,
temperature=temperature,
stream=True,
)
Expand Down Expand Up @@ -534,7 +530,6 @@ async def translate_completions(self, raycast_data: dict):
response = await self.anthropic_client.messages.create(
model=model,
messages=messages,
max_tokens=MAX_TOKENS,
temperature=0.8,
stream=True,
)
Expand Down
44 changes: 34 additions & 10 deletions pdm.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 7d08ad0

Please sign in to comment.