TypeError when training with Trainer: OpenELMForCausalLM.forward() got an unexpected keyword argument 'num_items_in_batch'
TypeError Traceback (most recent call last)
in <cell line: 0>()
62 trainer = Trainer(model=model, args=training_args, data_collator=custom_collate_func, train_dataset=test_dataset, eval_dataset=test_validation)
63
---> 64 result = trainer.train()
65 # --------------------------------------------------------------------------------------------------------------------------------------------------------------------
66 # Using Accelerator for Multi-GPU (future)
14 frames
/usr/local/lib/python3.11/dist-packages/transformers/trainer.py in train(self, resume_from_checkpoint, trial, ignore_keys_for_eval, **kwargs)
2162 hf_hub_utils.enable_progress_bars()
2163 else:
-> 2164 return inner_training_loop(
2165 args=args,
2166 resume_from_checkpoint=resume_from_checkpoint,
/usr/local/lib/python3.11/dist-packages/transformers/trainer.py in _inner_training_loop(self, batch_size, args, resume_from_checkpoint, trial, ignore_keys_for_eval)
2522 )
2523 with context():
-> 2524 tr_loss_step = self.training_step(model, inputs, num_items_in_batch)
2525
2526 if (
/usr/local/lib/python3.11/dist-packages/transformers/trainer.py in training_step(self, model, inputs, num_items_in_batch)
3652
3653 with self.compute_loss_context_manager():
-> 3654 loss = self.compute_loss(model, inputs, num_items_in_batch=num_items_in_batch)
3655
3656 del inputs
/usr/local/lib/python3.11/dist-packages/transformers/trainer.py in compute_loss(self, model, inputs, return_outputs, num_items_in_batch)
3706 loss_kwargs["num_items_in_batch"] = num_items_in_batch
3707 inputs = {**inputs, **loss_kwargs}
-> 3708 outputs = model(**inputs)
3709 # Save past state if it exists
3710 # TODO: this needs to be fixed and made cleaner later.
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _wrapped_call_impl(self, *args, **kwargs)
1734 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1735 else:
-> 1736 return self._call_impl(*args, **kwargs)
1737
1738 # torchrec tests the code consistency with the following code
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
1745 or _global_backward_pre_hooks or _global_backward_hooks
1746 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1747 return forward_call(*args, **kwargs)
1748
1749 result = None
/usr/local/lib/python3.11/dist-packages/peft/peft_model.py in forward(self, input_ids, attention_mask, inputs_embeds, labels, output_attentions, output_hidden_states, return_dict, task_ids, **kwargs)
1766 prompts = prompts.to(inputs_embeds.dtype)
1767 inputs_embeds = torch.cat((prompts, inputs_embeds), dim=1)
-> 1768 return self.base_model(inputs_embeds=inputs_embeds, **kwargs)
1769
1770 def _cpt_forward(
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _wrapped_call_impl(self, *args, **kwargs)
1734 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1735 else:
-> 1736 return self._call_impl(*args, **kwargs)
1737
1738 # torchrec tests the code consistency with the following code
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
1745 or _global_backward_pre_hooks or _global_backward_hooks
1746 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1747 return forward_call(*args, **kwargs)
1748
1749 result = None
/usr/local/lib/python3.11/dist-packages/peft/peft_model.py in forward(self, input_ids, attention_mask, inputs_embeds, labels, output_attentions, output_hidden_states, return_dict, task_ids, **kwargs)
1766 prompts = prompts.to(inputs_embeds.dtype)
1767 inputs_embeds = torch.cat((prompts, inputs_embeds), dim=1)
-> 1768 return self.base_model(inputs_embeds=inputs_embeds, **kwargs)
1769
1770 def _cpt_forward(
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _wrapped_call_impl(self, *args, **kwargs)
1734 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1735 else:
-> 1736 return self._call_impl(*args, **kwargs)
1737
1738 # torchrec tests the code consistency with the following code
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
1745 or _global_backward_pre_hooks or _global_backward_hooks
1746 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1747 return forward_call(*args, **kwargs)
1748
1749 result = None
/usr/local/lib/python3.11/dist-packages/peft/peft_model.py in forward(self, input_ids, attention_mask, inputs_embeds, labels, output_attentions, output_hidden_states, return_dict, task_ids, **kwargs)
1766 prompts = prompts.to(inputs_embeds.dtype)
1767 inputs_embeds = torch.cat((prompts, inputs_embeds), dim=1)
-> 1768 return self.base_model(inputs_embeds=inputs_embeds, **kwargs)
1769
1770 def _cpt_forward(
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _wrapped_call_impl(self, *args, **kwargs)
1734 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1735 else:
-> 1736 return self._call_impl(*args, **kwargs)
1737
1738 # torchrec tests the code consistency with the following code
/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
1745 or _global_backward_pre_hooks or _global_backward_hooks
1746 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1747 return forward_call(*args, **kwargs)
1748
1749 result = None
TypeError: OpenELMForCausalLM.forward() got an unexpected keyword argument 'num_items_in_batch'
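
From the last frames it looks like compute_loss merges num_items_in_batch into the model inputs (trainer.py lines 3706-3708 above), and the remote-code OpenELMForCausalLM.forward() signature does not accept that keyword, so it fails once the PEFT wrapper forwards everything to the base model. Below is a minimal sketch of the workaround I am considering, not a verified fix: it assumes the merge only happens when the trainer's model_accepts_loss_kwargs attribute is True in this transformers version, so forcing it off should keep the extra keyword away from the model. Is this a reasonable way to handle it, or is there a cleaner option?

# Sketch of a possible workaround (unverified). `model`, `training_args`,
# `custom_collate_func`, `test_dataset`, and `test_validation` are the objects
# from my training cell above.
from transformers import Trainer

trainer = Trainer(
    model=model,                          # PEFT prompt-tuning model over OpenELM
    args=training_args,
    data_collator=custom_collate_func,
    train_dataset=test_dataset,
    eval_dataset=test_validation,
)

# Assumption: this attribute gates the loss_kwargs merge seen at
# trainer.py:3706-3708, so disabling it stops num_items_in_batch from being
# passed into OpenELMForCausalLM.forward().
trainer.model_accepts_loss_kwargs = False

result = trainer.train()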