diff --git a/src/transformers/tokenization_utils_fast.py b/src/transformers/tokenization_utils_fast.py
index 5d238a5715ffd2..1243c3944ebd6c 100644
--- a/src/transformers/tokenization_utils_fast.py
+++ b/src/transformers/tokenization_utils_fast.py
@@ -648,6 +648,8 @@ def _decode(
 
         if isinstance(token_ids, int):
             token_ids = [token_ids]
+        if token_ids and isinstance(token_ids[0], float):
+            token_ids = [int(_id) for _id in token_ids]
         text = self._tokenizer.decode(token_ids, skip_special_tokens=skip_special_tokens)
 
         clean_up_tokenization_spaces = (
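
For context, a minimal sketch of the behavior this patch addresses (not part of the diff itself): token ids that arrive as Python floats, e.g. after a round trip through a float tensor or JSON, were previously passed straight to the Rust backend's integer-only `decode` and failed; with the cast above they are converted to ints first. The checkpoint name below is only an illustrative assumption, not something the patch depends on.

```python
from transformers import AutoTokenizer

# Any fast tokenizer works here; "bert-base-uncased" is just an example checkpoint.
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased", use_fast=True)

ids = tokenizer.encode("hello world")   # e.g. [101, 7592, 2088, 102]
float_ids = [float(i) for i in ids]     # ids coerced to float, e.g. via a float tensor

# Before the patch this call errored in the Rust backend, which expects
# integer ids; with the patch it decodes the same as the int version.
print(tokenizer.decode(float_ids, skip_special_tokens=True))  # -> "hello world"
```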