
Commit cdcbd02
fix session.response
lvhan028 committed Jul 21, 2023
1 parent bb9991d
Showing 2 changed files with 1 addition and 2 deletions.

lmdeploy/model.py (2 changes: 1 addition & 1 deletion)

@@ -156,7 +156,7 @@ class Llama2(BaseModel):
"""Chat template of LLaMA2 model."""

def __init__(self):

super().__init__()
B_INST, E_INST = '[INST]', '[/INST]'
B_SYS, E_SYS = '<<SYS>>\n', '\n<</SYS>>\n\n'

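For reference, B_INST/E_INST and B_SYS/E_SYS in the hunk above are the standard LLaMA-2 chat markers. The snippet below is only an illustrative sketch of how such markers are typically assembled into a single-turn prompt; it is not lmdeploy's template code, and build_prompt is a hypothetical helper.

# Illustrative only: how LLaMA-2 chat markers are commonly combined into a
# single-turn prompt. `build_prompt` is a hypothetical helper, not lmdeploy API.
B_INST, E_INST = '[INST]', '[/INST]'
B_SYS, E_SYS = '<<SYS>>\n', '\n<</SYS>>\n\n'


def build_prompt(system: str, user: str) -> str:
    """Wrap a system prompt and one user message in LLaMA-2 chat markers."""
    return f'{B_INST} {B_SYS}{system}{E_SYS}{user} {E_INST}'


print(build_prompt('You are a helpful assistant.', 'Hello!'))
# [INST] <<SYS>>
# You are a helpful assistant.
# <</SYS>>
#
# Hello! [/INST]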

lmdeploy/serve/turbomind/chatbot.py (1 change: 0 additions & 1 deletion)

@@ -547,7 +547,6 @@ def stream_consumer(postprocess, res_queue, session, n_input_token,
         except Exception as e:
             logger.error(f'catch exception: {e}')

-    session.response = session.response[len(session.prompt):]
     # put session back to queue so that `_stream_infer` can update it in
     # `self.sessions`
     while not res_queue.empty():
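The deleted line stripped len(session.prompt) characters off the front of session.response, which is presumably what the commit message refers to: if the accumulated response no longer begins with the prompt text, that slice cuts off the start of the generated reply instead. A minimal, hypothetical illustration of the failure mode (the Session class below is a stand-in, not lmdeploy code):

# Hypothetical stand-in for the session object; not lmdeploy code.
class Session:
    def __init__(self, prompt: str, response: str):
        self.prompt = prompt
        self.response = response


# Assume the response already holds only the generated text, without the prompt.
session = Session(prompt='[INST] Hello! [/INST]',
                  response='Hi there, how can I help you today?')

# The removed line would then drop the beginning of the reply:
truncated = session.response[len(session.prompt):]
print(truncated)  # 'elp you today?' -- the first 21 characters are gone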
