yasserrmd committed on
Commit
3d3237e
1 Parent(s): 3e35554

Update app.py

Files changed (1)
app.py +2 -2
app.py CHANGED
@@ -20,7 +20,7 @@ model = YOLOv10.from_pretrained('jameslahm/yolov10x').to(device)
 
 model_id = "meta-llama/Llama-3.2-11B-Vision-Instruct"
 
-model = MllamaForConditionalGeneration.from_pretrained(
+model_code = MllamaForConditionalGeneration.from_pretrained(
     model_id,
     torch_dtype=torch.bfloat16,
     device_map="auto",
@@ -104,7 +104,7 @@ def generate_journal_infographics(journal):
     model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
 
     # Generate the documentation
-    generated_ids = model.generate(**model_inputs, max_new_tokens=4000)
+    generated_ids = model_code.generate(**model_inputs, max_new_tokens=4000)
     generated_ids = [output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)]
     documentation = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
     print(documentation)
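
The point of the rename is that app.py already binds the name model to a YOLOv10 detector (see the first hunk header), so assigning the Llama 3.2 Vision model to model as well overwrote the detector. With model_code, both models coexist. A minimal sketch of the resulting setup, with the import lines assumed (the diff only shows the calls):

import torch
from transformers import MllamaForConditionalGeneration
from ultralytics import YOLOv10  # assumed import path; the diff only shows the call

device = "cuda" if torch.cuda.is_available() else "cpu"

# The YOLOv10 detector keeps the name `model`, untouched by this commit
model = YOLOv10.from_pretrained('jameslahm/yolov10x').to(device)

# The vision-language model now lives in `model_code`, so it no longer
# shadows the detector loaded above
model_id = "meta-llama/Llama-3.2-11B-Vision-Instruct"
model_code = MllamaForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)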