Bugfix: Ensure logs are printed when streaming
abetlen committed May 10, 2023
Parent: 3c96b43 · Commit: cdeaded
Showing 1 changed file with 3 additions and 3 deletions.
llama_cpp/llama.py: 3 additions & 3 deletions

@@ -709,6 +709,9 @@ def _create_completion(
                 print("Llama._create_completion: cache save", file=sys.stderr)
             self.cache[prompt_tokens + completion_tokens] = self.save_state()
 
+        if self.verbose:
+            llama_cpp.llama_print_timings(self.ctx)
+
         if stream:
             yield {
                 "id": completion_id,
@@ -780,9 +783,6 @@ def _create_completion(
                 "top_logprobs": top_logprobs,
             }
 
-        if self.verbose:
-            llama_cpp.llama_print_timings(self.ctx)
-
         yield {
             "id": completion_id,
             "object": "text_completion",
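The change is small but worth spelling out: before this commit, llama_cpp.llama_print_timings(self.ctx) was only reached in the non-streaming branch of _create_completion, so the verbose timing log never appeared when stream=True. Moving the call above the "if stream:" check makes it run in both paths. Below is a minimal, self-contained sketch of the same pattern; the generate function and its timing message are hypothetical stand-ins, not part of llama-cpp-python.

import sys
from typing import Iterator


def generate(prompt: str, stream: bool, verbose: bool = True) -> Iterator[dict]:
    # Illustrative sketch only: `generate` stands in for Llama._create_completion.
    tokens = prompt.split()  # stand-in for real token generation

    # The bugfix pattern: print diagnostics *before* branching into the
    # streaming path, so the log is emitted whether or not stream=True.
    if verbose:
        print(f"timings: {len(tokens)} tokens processed", file=sys.stderr)

    if stream:
        for i, tok in enumerate(tokens):
            yield {"id": "cmpl-0", "choices": [{"text": tok, "index": i}]}
        return

    yield {"id": "cmpl-0", "choices": [{"text": " ".join(tokens), "index": 0}]}


if __name__ == "__main__":
    # Both invocations now produce the timings line on stderr.
    for chunk in generate("hello streaming world", stream=True):
        pass
    next(generate("hello world", stream=False))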
