Description
Regarding the key error 20211: I have done this as explained, manually running formatting_func first:
import datasets

# Apply the formatting function manually, then rebuild the dataset from the results
formatted_texts = formatting_prompts_func(train_dataset)
formatted_dataset = datasets.Dataset.from_dict({
    'text': formatted_texts,
    'label': train_dataset['label'],
})
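For context, formatting_prompts_func itself is not shown in this report; a minimal sketch of what it is assumed to do (map each example to a single prompt string) could look like the following, where the column names 'instruction' and 'output' are my own placeholders:

def formatting_prompts_func(examples):
    # Hypothetical sketch: the real function is not included in this report.
    # The column names 'instruction' and 'output' are assumed placeholders.
    texts = []
    for instruction, output in zip(examples["instruction"], examples["output"]):
        texts.append(f"### Instruction:\n{instruction}\n\n### Response:\n{output}")
    return texts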
trainer = SFTTrainer(
    model = model,
    tokenizer = tokenizer,
    train_dataset = formatted_dataset,
    max_seq_length = max_seq_length,
    dataset_num_proc = 1,
    packing = False,  # not needed because group_by_length is True
    args = TrainingArguments(
        per_device_train_batch_size = 32,
        gradient_accumulation_steps = 1,
        warmup_steps = 10,
        learning_rate = 1e-4,
        fp16 = not torch.cuda.is_bf16_supported(),
        bf16 = torch.cuda.is_bf16_supported(),
        logging_steps = 1,
        optim = "adamw_8bit",
        weight_decay = 0.01,
        lr_scheduler_type = "cosine",
        seed = 3407,
        output_dir = "outputs",
        num_train_epochs = 1,
        # report_to = "wandb",
        report_to = "none",
        group_by_length = True,
    ),
    data_collator = collator,
    dataset_text_field = "text",
)
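The collator passed above is not defined in the snippet. If it is meant to mask the prompt tokens so the loss only covers the response, a sketch using trl's DataCollatorForCompletionOnlyLM might look like this; the response_template string is an assumption tied to the prompt format sketched earlier:

from trl import DataCollatorForCompletionOnlyLM

# Hypothetical: assumes the formatted prompts contain a "### Response:" marker
collator = DataCollatorForCompletionOnlyLM(
    response_template = "### Response:",
    tokenizer = tokenizer,
)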
The key error got resolved, but now I land on this:
/usr/local/lib/python3.10/dist-packages/unsloth/models/qwen3.py in Qwen3Attention_fast_forward(self, hidden_states, causal_mask, attention_mask, position_ids, past_key_value, output_attentions, use_cache, padding_mask, position_embeddings, *args, **kwargs)
135 Q_M = bsz * q_len
136
--> 137 has_swa = isinstance(causal_mask, xformers.attn_bias.BlockDiagonalCausalMask)
138
139 # Group query attention
AttributeError: 'NoneType' object has no attribute 'attn_bias'
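The AttributeError suggests that the xformers module object inside unsloth's attention path is None, i.e. xformers did not import in this environment. Purely as a debugging sketch (not part of the original report), a quick check would be:

try:
    import xformers
    import xformers.ops
    # If this prints a version, xformers imported fine and the cause lies elsewhere
    print("xformers:", xformers.__version__)
except ImportError as err:
    print("xformers is not importable:", err)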