ermu2001 committed on
Commit
7c41aef
1 Parent(s): afc99d0

Update tasks/eval/model_utils.py

Browse files
Files changed (1) hide show
  1. tasks/eval/model_utils.py +2 -0
tasks/eval/model_utils.py CHANGED
@@ -8,6 +8,7 @@ from tasks.eval.eval_utils import Conversation
8
  from models.pllava import PllavaProcessor, PllavaForConditionalGeneration, PllavaConfig
9
  from accelerate import init_empty_weights, dispatch_model, infer_auto_device_map,load_checkpoint_in_model
10
  from accelerate.utils import get_balanced_memory
 
11
 
12
  from transformers import StoppingCriteria
13
  class KeywordsStoppingCriteria(StoppingCriteria):
@@ -136,6 +137,7 @@ def load_adapters(model, adapter_model_name_or_paths):
136
  return model
137
 
138
 
 
139
  def pllava_answer(conv: Conversation, model, processor, img_list, do_sample=True, max_new_tokens=200, num_beams=1, min_length=1, top_p=0.9,
140
  repetition_penalty=1.0, length_penalty=1, temperature=1.0, stop_criteria_keywords=None, print_res=False):
141
  # torch.cuda.empty_cache()
 
8
  from models.pllava import PllavaProcessor, PllavaForConditionalGeneration, PllavaConfig
9
  from accelerate import init_empty_weights, dispatch_model, infer_auto_device_map,load_checkpoint_in_model
10
  from accelerate.utils import get_balanced_memory
11
+ import spaces
12
 
13
  from transformers import StoppingCriteria
14
  class KeywordsStoppingCriteria(StoppingCriteria):
 
137
  return model
138
 
139
 
140
+ @spaces.GPU(duration=120)
141
  def pllava_answer(conv: Conversation, model, processor, img_list, do_sample=True, max_new_tokens=200, num_beams=1, min_length=1, top_p=0.9,
142
  repetition_penalty=1.0, length_penalty=1, temperature=1.0, stop_criteria_keywords=None, print_res=False):
143
  # torch.cuda.empty_cache()