Update app.py
app.py CHANGED
@@ -1,12 +1,10 @@
 import gradio as gr
-from hazm import Normalizer, word_tokenize, Lemmatizer, POSTagger, Chunker, DependencyParser
+from hazm import Normalizer, word_tokenize, Lemmatizer, Chunker
 
 # Initialize Hazm components
 normalizer = Normalizer()
 lemmatizer = Lemmatizer()
-tagger = POSTagger(model='resources/postagger.model')
 chunker = Chunker(model='resources/chunker.model')
-parser = DependencyParser(tagger=tagger, lemmatizer=lemmatizer)
 
 def process_text(text, operations):
     result = {}
@@ -19,20 +17,14 @@ def process_text(text, operations):
     if 'lemmatize' in operations:
         lemmas = [lemmatizer.lemmatize(token) for token in word_tokenize(text)]
         result['Lemmas'] = lemmas
-    if 'pos_tag' in operations:
-        pos_tags = tagger.tag(word_tokenize(text))
-        result['POS Tags'] = pos_tags
     if 'chunk' in operations:
-        pos_tags = tagger.tag(word_tokenize(text))
+        pos_tags = word_tokenize(text)
         chunks = chunker.parse(pos_tags)
         result['Chunks'] = str(chunks)
-    if 'dependency_parse' in operations:
-        parse_tree = parser.parse(word_tokenize(text))
-        result['Dependency Parse'] = str(parse_tree)
     return result
 
 # Define Gradio interface
-operations = ['normalize', 'tokenize', 'lemmatize', 'pos_tag', 'chunk', 'dependency_parse']
+operations = ['normalize', 'tokenize', 'lemmatize', 'chunk']
 iface = gr.Interface(
     fn=process_text,
     inputs=[
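For quick local verification of the part of process_text kept by this commit, a minimal standalone sketch of the 'lemmatize' branch is shown below. The sample Persian sentence is illustrative and not part of the commit; it assumes the hazm package is installed.

# Standalone sketch of the 'lemmatize' branch from process_text
# (illustrative sample sentence; assumes `pip install hazm`).
from hazm import Lemmatizer, word_tokenize

lemmatizer = Lemmatizer()
text = "کتاب‌ها روی میز هستند"
lemmas = [lemmatizer.lemmatize(token) for token in word_tokenize(text)]
print(lemmas)  # one lemma per token, mirroring result['Lemmas'] in app.py

The 'chunk' branch additionally needs resources/chunker.model to be present in the Space, so it is left out of this sketch.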