Skip to content

Commit 041e217

Browse files
reidliu41 and lulmer
authored and committed
[Misc] improve example script output (vllm-project#15528)
Signed-off-by: reidliu41 <reid201711@gmail.com> Co-authored-by: reidliu41 <reid201711@gmail.com> Signed-off-by: Louis Ulmer <ulmerlouis@gmail.com>
1 parent 01484d1 commit 041e217

File tree

5 files changed

+16
-6
lines changed

5 files changed

+16
-6
lines changed

examples/offline_inference/basic/basic.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,10 @@
1818
# that contain the prompt, generated text, and other information.
1919
outputs = llm.generate(prompts, sampling_params)
2020
# Print the outputs.
21+
print("\nGenerated Outputs:\n" + "-" * 60)
2122
for output in outputs:
2223
prompt = output.prompt
2324
generated_text = output.outputs[0].text
24-
print(f"Prompt: {prompt!r}, Generated text: {generated_text!r}")
25+
print(f"Prompt: {prompt!r}")
26+
print(f"Output: {generated_text!r}")
27+
print("-" * 60)

examples/offline_inference/basic/chat.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,12 +27,13 @@ def main(args: dict):
2727
sampling_params.top_k = top_k
2828

2929
def print_outputs(outputs):
30+
print("\nGenerated Outputs:\n" + "-" * 80)
3031
for output in outputs:
3132
prompt = output.prompt
3233
generated_text = output.outputs[0].text
33-
print(f"Prompt: {prompt!r}")
34+
print(f"Prompt: {prompt!r}\n")
3435
print(f"Generated text: {generated_text!r}")
35-
print("-" * 80)
36+
print("-" * 80)
3637

3738
print("=" * 80)
3839

examples/offline_inference/basic/classify.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,14 @@ def main(args: Namespace):
2323
outputs = model.classify(prompts)
2424

2525
# Print the outputs.
26+
print("\nGenerated Outputs:\n" + "-" * 60)
2627
for prompt, output in zip(prompts, outputs):
2728
probs = output.outputs.probs
2829
probs_trimmed = ((str(probs[:16])[:-1] +
2930
", ...]") if len(probs) > 16 else probs)
30-
print(f"Prompt: {prompt!r} | "
31+
print(f"Prompt: {prompt!r} \n"
3132
f"Class Probabilities: {probs_trimmed} (size={len(probs)})")
33+
print("-" * 60)
3234

3335

3436
if __name__ == "__main__":

examples/offline_inference/basic/embed.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,14 @@ def main(args: Namespace):
2323
outputs = model.embed(prompts)
2424

2525
# Print the outputs.
26+
print("\nGenerated Outputs:\n" + "-" * 60)
2627
for prompt, output in zip(prompts, outputs):
2728
embeds = output.outputs.embedding
2829
embeds_trimmed = ((str(embeds[:16])[:-1] +
2930
", ...]") if len(embeds) > 16 else embeds)
30-
print(f"Prompt: {prompt!r} | "
31+
print(f"Prompt: {prompt!r} \n"
3132
f"Embeddings: {embeds_trimmed} (size={len(embeds)})")
33+
print("-" * 60)
3234

3335

3436
if __name__ == "__main__":

examples/offline_inference/basic/score.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,9 +22,11 @@ def main(args: Namespace):
2222
outputs = model.score(text_1, texts_2)
2323

2424
# Print the outputs.
25+
print("\nGenerated Outputs:\n" + "-" * 60)
2526
for text_2, output in zip(texts_2, outputs):
2627
score = output.outputs.score
27-
print(f"Pair: {[text_1, text_2]!r} | Score: {score}")
28+
print(f"Pair: {[text_1, text_2]!r} \nScore: {score}")
29+
print("-" * 60)
2830

2931

3032
if __name__ == "__main__":

0 commit comments

Comments (0)