Skip to content

Commit

Permalink
Rename `max_length` parameter to `total_tokens` (input + generation budget) and clarify its description
Browse files Browse the repository at this point in the history
  • Loading branch information
daanelson committed Mar 17, 2023
1 parent c9407af commit d6d5bdd
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions predict.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@ def predict(
self,
prompt: str = Input(description=f"Prompt to send to LLaMA."),
n: int = Input(description="Number of output sequences to generate", default=1, ge=1, le=5),
max_length: int = Input(
description="Maximum number of tokens to generate. A word is generally 2-3 tokens",
total_tokens: int = Input(
description="Maximum number of tokens for input + generation. A word is generally 2-3 tokens",
ge=1,
default=50
default=2000
),
temperature: float = Input(
description="Adjusts randomness of outputs, greater than 1 is random and 0 is deterministic, 0.75 is a good starting value.",
Expand All @@ -50,7 +50,7 @@ def predict(
outputs = self.model.generate(
input,
num_return_sequences=n,
max_length=max_length,
max_length=total_tokens,
do_sample=True,
temperature=temperature,
top_p=top_p,
Expand Down

0 comments on commit d6d5bdd

Please sign in to comment.