@@ -215,7 +215,7 @@ public enum ChatTemplateArgument {
 ///
 /// This is the main protocol that defines all tokenizer operations, including text processing,
 /// chat template application, and special token handling.
-public protocol Tokenizer {
+public protocol Tokenizer: Sendable {
     /// Tokenizes the input text into a sequence of tokens.
     ///
     /// - Parameter text: The input text to tokenize
@@ -451,7 +451,7 @@ let specialTokenAttributes: [String] = [
 /// This class provides a complete tokenizer implementation that can be initialized from
 /// Hugging Face Hub configuration files and supports all standard tokenization operations
 /// including chat template application, normalization, pre-tokenization, and post-processing.
-public class PreTrainedTokenizer: Tokenizer {
+public class PreTrainedTokenizer: @unchecked Sendable, Tokenizer {
     let model: TokenizingModel

     public var bosToken: String? { model.bosToken }
@@ -905,7 +905,7 @@ public extension AutoTokenizer {

 // MARK: - Tokenizer model classes

-class T5Tokenizer: UnigramTokenizer { }
+class T5Tokenizer: UnigramTokenizer, @unchecked Sendable { }

 // MARK: - PreTrainedTokenizer classes

@@ -954,7 +954,7 @@ func maybeUpdatePostProcessor(tokenizerConfig: Config, processorConfig: Config?)
 }

 /// See https://github.com/xenova/transformers.js/blob/1a9964fb09b8f54fcbeac46dc6aae8d76795809d/src/tokenizers.js#L3203 for these exceptions
-class LlamaPreTrainedTokenizer: PreTrainedTokenizer {
+class LlamaPreTrainedTokenizer: PreTrainedTokenizer, @unchecked Sendable {
     let isLegacy: Bool

     required init(tokenizerConfig: Config, tokenizerData: Config, strict: Bool = true) throws {
0 commit comments