fix: max_past default value must be -1, not 0 (#1348)
OlivierDehaene authored Dec 15, 2023
1 parent 9b78a6e commit 37555cf
Showing 3 changed files with 5 additions and 2 deletions.
@@ -149,7 +149,7 @@ def __init__(
     ):
         super().__init__()
         self.max_past = (
-            config.sliding_window if config.sliding_window is not None else 0
+            config.sliding_window if config.sliding_window is not None else -1
         )
         self.num_heads = config.num_attention_heads
         self.hidden_size = config.hidden_size
@@ -204,7 +204,7 @@ def __init__(
     ):
         super().__init__()
         self.max_past = (
-            config.sliding_window if config.sliding_window is not None else 0
+            config.sliding_window if config.sliding_window is not None else -1
        )
         self.num_heads = config.num_attention_heads
         self.hidden_size = config.hidden_size
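Both hunks above apply the same one-line fix in two model files: when the model config defines no sliding window, max_past now falls back to -1 instead of 0. In the flash-attention convention, -1 means "no left-window limit", while 0 would request a zero-token window. A minimal sketch of the sentinel semantics (the helper name resolve_max_past is illustrative, not from the repository):

def resolve_max_past(sliding_window):
    # -1 is the flash-attention sentinel for "attend to the entire
    # left context"; 0 would mean a zero-token window and break
    # attention for models configured without a sliding window.
    return sliding_window if sliding_window is not None else -1

assert resolve_max_past(4096) == 4096  # finite sliding window
assert resolve_max_past(None) == -1    # no sliding window configured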
3 changes: 3 additions & 0 deletions server/text_generation_server/utils/flash_attn.py
@@ -72,6 +72,9 @@ def attention(
     softmax_scale,
     window_size_left=-1,
 ):
+    if window_size_left <= 0 and window_size_left != -1:
+        raise ValueError("`window_size_left` must be > 0 or -1")
+
     if HAS_FLASH_ATTN_V2_CUDA:
         return flash_attn_2_cuda.varlen_fwd(
             q,
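The added guard rejects invalid window sizes in Python before they reach the CUDA kernel. A short usage sketch of the same check as a standalone helper (check_window_size_left is illustrative; the commit inlines the check in attention):

def check_window_size_left(window_size_left):
    # Mirror of the guard added in attention(): accept any positive
    # window size, or the -1 sentinel for an unlimited left context.
    if window_size_left <= 0 and window_size_left != -1:
        raise ValueError("`window_size_left` must be > 0 or -1")

check_window_size_left(4096)  # ok: finite sliding window
check_window_size_left(-1)    # ok: unlimited left context
try:
    check_window_size_left(0)  # the old, buggy default; now rejected
except ValueError as err:
    print(err)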
