feat: add prompt-format list
Signed-off-by: reidliu <[email protected]>
reidliu committed Feb 23, 2025
1 parent 19ae4b3 commit 4f74a7e
Showing 1 changed file with 21 additions and 1 deletion: llama_stack/cli/model/prompt_format.py
@@ -9,6 +9,7 @@
 from io import StringIO
 
 from llama_stack.cli.subcommand import Subcommand
+from llama_stack.cli.table import print_table
 from llama_stack.models.llama.datatypes import CoreModelId, ModelFamily, is_multimodal, model_family
 
 
@@ -48,7 +49,26 @@ def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
         supported_model_ids = [
             m for m in CoreModelId if model_family(m) in {ModelFamily.llama3_1, ModelFamily.llama3_2}
         ]
-        model_str = "\n".join([m.value for m in supported_model_ids])
+
+        model_list = [m.value for m in supported_model_ids]
+        model_str = "\n".join(model_list)
+
+        if args.list:
+            headers = ["Model"]
+            rows = []
+            for m in model_list:
+                rows.append(
+                    [
+                        m,
+                    ]
+                )
+            print_table(
+                rows,
+                headers,
+                separate_rows=True,
+            )
+            return
+
         try:
             model_id = CoreModelId(args.model_name)
         except ValueError:
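For reference, this diff only changes the command handler; the `--list` flag it reads as `args.list` has to be registered on the subcommand's argument parser elsewhere in the same file. A minimal sketch of what that wiring could look like is shown below; the `_add_arguments` method name, the `-l` short option, and the help strings are assumptions for illustration, not part of this commit.

# Hypothetical sketch: registering the flag consumed as args.list above.
# The method name, short option, and help text are assumed, not taken from this commit.
def _add_arguments(self):
    self.parser.add_argument(
        "-m",
        "--model-name",
        type=str,
        help="Model family to show the prompt format for",
    )
    self.parser.add_argument(
        "-l",
        "--list",
        action="store_true",
        default=False,
        help="List all models supported by prompt-format",
    )

With that in place, an invocation along the lines of `llama model prompt-format --list` would print the supported model IDs as a single-column table via print_table and return before any prompt-format rendering.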
