remove trainer
Files changed:
- __pycache__/Scraper.cpython-310.pyc  +0 -0
- app.py  +4 -4
__pycache__/Scraper.cpython-310.pyc
ADDED: Binary file (654 Bytes)
app.py
CHANGED

@@ -3,7 +3,7 @@ import numpy as np
 import re
 import time
 
-from transformers import AutoModelForSequenceClassification, AutoTokenizer
+from transformers import AutoModelForSequenceClassification, AutoTokenizer
 from Scraper import Scrap
 
 st.set_page_config(layout="wide")
@@ -15,7 +15,7 @@ label = {0: "valid", 1: "fake"}
 def load_model():
     model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint, num_labels=2)
     tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, fast=True)
-    return
+    return model, tokenizer
 
 def sigmoid(x):
     return 1 / (1 + np.exp(-x))
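The return fix in this hunk is the substance of the change to the loader: without an explicit return the function yields None, so any caller that tries to unpack a model and a tokenizer from it fails. Here is a minimal sketch of the corrected loader and a presumed call site; the checkpoint name is a placeholder, since the app's real model_checkpoint is defined above this hunk and not shown in the diff.

from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Placeholder checkpoint: the app defines its own model_checkpoint earlier in app.py.
model_checkpoint = "distilbert-base-uncased"

def load_model():
    # num_labels=2 matches the valid/fake label map used by the app.
    model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint, num_labels=2)
    tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, use_fast=True)
    # Before this commit the body ended with a bare return, so callers received None.
    return model, tokenizer

# Presumed usage elsewhere in app.py: unpack both objects for tokenization and inference.
model, tokenizer = load_model()

Note that the app passes fast=True to AutoTokenizer.from_pretrained; the keyword transformers actually recognizes is use_fast, which the sketch uses instead.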
@@ -50,9 +50,9 @@ if submit:
     for i in range(text_len // 512):
         sequences.append(" ".join(text[i * 512: (i + 1) * 512]))
     sequences.append(" ".join(text[text_len - (text_len % 512) : text_len]))
-    sequences =
+    sequences = tokenizer(sequences, max_length=512, truncation=True, padding="max_length", return_tensors='pt')
 
-    predictions = model
+    predictions = model(**sequences)[0].detach().numpy()
     result = [
         np.sum([sigmoid(i[0]) for i in predictions]) / len(predictions),
         np.sum([sigmoid(i[1]) for i in predictions]) / len(predictions)
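Taken together, the hunks replace the removed Trainer-based prediction path with direct tokenizer and model calls: the scraped text is split into 512-word windows, every window is tokenized to a fixed 512-token length, the logits are taken out of the model output, and the per-class sigmoid scores are averaged over all windows. Below is a self-contained sketch of that flow under the same assumptions as above (model and tokenizer already loaded from a placeholder checkpoint); wrapping the forward pass in torch.no_grad() is an inference convenience added here, not something shown in the diff.

import numpy as np
import torch

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def classify(text_words, model, tokenizer):
    # Split the word list into consecutive 512-word windows, mirroring the loop in app.py.
    text_len = len(text_words)
    sequences = []
    for i in range(text_len // 512):
        sequences.append(" ".join(text_words[i * 512: (i + 1) * 512]))
    sequences.append(" ".join(text_words[text_len - (text_len % 512): text_len]))

    # Tokenize every window to a fixed 512-token length, matching the new tokenizer call.
    encoded = tokenizer(sequences, max_length=512, truncation=True,
                        padding="max_length", return_tensors="pt")

    # Forward pass; indexing the output with [0] picks out the logits tensor.
    with torch.no_grad():
        logits = model(**encoded)[0].detach().numpy()

    # Average the per-class sigmoid scores over all windows, as app.py's result list does.
    return [
        np.sum([sigmoid(row[0]) for row in logits]) / len(logits),
        np.sum([sigmoid(row[1]) for row in logits]) / len(logits),
    ]

The averaged pair lines up with the label map shown in the second hunk, index 0 for valid and index 1 for fake, so the larger of the two averages presumably drives the verdict the app displays.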