|
import gradio as gr |
|
from transformers import GPT2LMHeadModel, GPT2Tokenizer |
|
import random |
|
|
|
|
|
# Load the Arabic GPT-2 (AraGPT2) medium checkpoint from the Hugging Face hub.
# Downloaded on first run and cached locally thereafter.
tokenizer = GPT2Tokenizer.from_pretrained("aubmindlab/aragpt2-medium")

model = GPT2LMHeadModel.from_pretrained("aubmindlab/aragpt2-medium")




# The 14 distinct Arabic letters that appear in the Quranic muqatta'at
# (the "disjoined letters" opening certain surahs); used as seed characters
# for text generation below.
harf_muqattaat = ['أ', 'ل', 'م', 'ص', 'ر', 'ك', 'ه', 'ي', 'ع', 'ط', 'س', 'ح', 'ق', 'ن']
|
|
|
|
|
def generate_words_from_muqattaat():
    """Generate a short Arabic text seeded with random muqatta'at letters.

    Picks a random number (2 .. all 14) of distinct letters from
    ``harf_muqattaat``, joins them into a prompt string, and lets the
    AraGPT2 model continue it.

    Returns:
        str: The decoded generated text (prompt included), with special
        tokens stripped.
    """
    # How many seed letters to use this round.
    length = random.randint(2, len(harf_muqattaat))

    # random.sample draws `length` distinct letters in random order WITHOUT
    # mutating the module-level list (the original random.shuffle reordered
    # the shared constant in place on every call).
    input_text = ''.join(random.sample(harf_muqattaat, length))

    inputs = tokenizer.encode(input_text, return_tensors="pt")

    # Greedy decoding, capped at 20 tokens; no_repeat_ngram_size suppresses
    # repeated bigrams. pad_token_id is set explicitly because GPT-2 has no
    # pad token, which otherwise triggers a warning on open-ended generation.
    outputs = model.generate(
        inputs,
        max_length=20,
        num_return_sequences=1,
        no_repeat_ngram_size=2,
        pad_token_id=tokenizer.eos_token_id,
    )

    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)

    return generated_text
|
|
|
|
|
# Gradio UI: a single "Generate" action (no inputs) that shows the model's
# output. Title (Arabic): "Generating Arabic words using the disjoined letters".
iface = gr.Interface(
    fn=generate_words_from_muqattaat,
    inputs=None,
    outputs="text",
    title="توليد كلمات عربية باستخدام الحروف المقطعة",
)

# Guard the launch so importing this module (e.g. for testing) does not
# start the web server as a side effect.
if __name__ == "__main__":
    iface.launch()