gabrielmotablima
committed on
Commit
•
fd2f9bd
1
Parent(s):
cfa3d89
Update README.md
Browse files
README.md
CHANGED
@@ -72,7 +72,7 @@ model = VisionEncoderDecoderModel.from_pretrained("laicsiifes/swin-gportuguese-2
|
|
72 |
tokenizer = AutoTokenizer.from_pretrained("laicsiifes/swin-gportuguese-2")
|
73 |
image_processor = AutoImageProcessor.from_pretrained("laicsiifes/swin-gportuguese-2")
|
74 |
|
75 |
-
#
|
76 |
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
|
77 |
image = Image.open(requests.get(url, stream=True).raw)
|
78 |
pixel_values = image_processor(image, return_tensors="pt").pixel_values
|
@@ -80,9 +80,20 @@ pixel_values = image_processor(image, return_tensors="pt").pixel_values
|
|
80 |
# generate caption
|
81 |
generated_ids = model.generate(pixel_values)
|
82 |
generated_text = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
|
83 |
-
print(generated_text)
|
84 |
```
|
85 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
86 |
## 📈 Results
|
87 |
|
88 |
The evaluation metrics Cider-D, BLEU@4, ROUGE-L, METEOR and BERTScore
|
|
|
72 |
tokenizer = AutoTokenizer.from_pretrained("laicsiifes/swin-gportuguese-2")
|
73 |
image_processor = AutoImageProcessor.from_pretrained("laicsiifes/swin-gportuguese-2")
|
74 |
|
75 |
+
# preprocess an image
|
76 |
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
|
77 |
image = Image.open(requests.get(url, stream=True).raw)
|
78 |
pixel_values = image_processor(image, return_tensors="pt").pixel_values
|
|
|
80 |
# generate caption
|
81 |
generated_ids = model.generate(pixel_values)
|
82 |
generated_text = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
|
|
|
83 |
```
|
84 |
|
85 |
+
```python
|
86 |
+
import matplotlib.pyplot as plt
|
87 |
+
|
88 |
+
# plot image with caption
|
89 |
+
plt.imshow(image)
|
90 |
+
plt.axis("off")
|
91 |
+
plt.title(generated_text)
|
92 |
+
plt.show()
|
93 |
+
```
|
94 |
+
|
95 |
+
![image/png](https://cdn-uploads.huggingface.co/production/uploads/637a149c0dbdecf0b5bd6490/tl1YYtNIx4CCCKDPwKgk1.png)
|
96 |
+
|
97 |
## 📈 Results
|
98 |
|
99 |
The evaluation metrics Cider-D, BLEU@4, ROUGE-L, METEOR and BERTScore
|