import os
import random
import time

import gradio as gr
import openai
import pytesseract
from PIL import Image

Init_system_prompt = (
    "You are an AI Assistant that tries to teach kids various subjects. "
    "You are given learning material and your task is to ask questions about the material, "
    "grade the answers and give feedback on how to improve them."
)

system_message = {"role": "system", "content": Init_system_prompt}

system_prompts = {
    "English": Init_system_prompt,
    # Finnish: "You are an AI assistant whose task is to help children learn at school.
    # You are given learning material as text and your task is to ask questions about it,
    # grade the answers and give feedback on how the answers can be improved."
    "Finnish": "Olet tekoälyavustaja jonka tehtävänä on auttaa lapsia oppimaan koulussa. Sinulle annetaan oppimateriaalia tekstinä ja sinun tehtäväsi on kysyä kysymyksiä annetusta tekstistä, arvostella vastauksia ja antaa palautetta kuinka vastauksia voidaan parantaa."
}

question_strings = {
    "English": "\n Please ask a question about the previous paragraph. Question:",
    # Finnish: "Ask a question based on the previous paragraph. Question:"
    "Finnish": "\n Kysy kysymys edellisen kappaleen perusteella. Kysymys:",
}

# Tesseract traineddata names for the supported OCR languages.
lang_mapping = {
    "English": "eng",
    "Finnish": "fin"
}

# Locate the Tesseract binary and point pytesseract at it.
# os.system("rm -f path.txt")
os.system("which tesseract > path.txt")
with open("path.txt", 'r') as file:
    tesseract_path = file.read().replace('\n', '')
pytesseract.pytesseract.tesseract_cmd = tesseract_path


########### TAB 1 (UPLOAD) FUNCTIONS #############################

def print_files(files):
    # Debug helper: dump the attributes of the uploaded file objects.
    for file in files:
        print(file.__dir__())
        print(file.name)
        print(file.file)


def create_data(files, language_selection):
    # OCR every uploaded image and concatenate the recognized text into one
    # context string that the chat tab uses for question generation.
    question_context = ''
    for file in files:
        if file.name.endswith('.png') or file.name.endswith('.jpg'):
            try:
                question_context += pytesseract.image_to_string(
                    Image.open(file.name), lang=lang_mapping[language_selection]
                ) + '\n\n'
            except Exception as e:
                print(e)
    system_prompt = system_prompts[language_selection]
    return question_context, system_prompt


########### TAB 3 (CHAT) FUNCTIONS #############################

def user(user_message, history):
    # Append the user's message to the chat window and clear the input box.
    return "", history + [[user_message, None]]


def bot(history, messages_history, api_key, system_prompt, teksti_contexti,
        temperature, max_tokens, chatgpt_model, max_context_size_for_question,
        language_selection):
    user_message = history[-1][0]
    bot_message, messages_history = ask_gpt(
        user_message, messages_history, api_key, system_prompt, teksti_contexti,
        temperature, max_tokens, chatgpt_model, max_context_size_for_question,
        language_selection
    )
    messages_history += [{"role": "assistant", "content": bot_message}]
    history[-1][1] = bot_message
    time.sleep(0.2)
    return history, messages_history, str(messages_history)


def ask_gpt(message, messages_history, api_key, system_prompt, context,
            temperature, max_tokens, chatgpt_model, max_context_size_for_question,
            language_selection):
    # Reset the history to just the system prompt, then ask the model to pose a
    # question about a randomly chosen slice of the OCR context.
    messages_history, _, _ = init_history(messages_history, system_prompt)
    max_possible_position = max(0, len(context) - max_context_size_for_question)
    start = random.randint(0, max_possible_position)
    messages_history += [{
        "role": "user",
        "content": context[start:start + max_context_size_for_question]
        + question_strings[language_selection]
    }]
    openai.api_key = api_key
    response = openai.ChatCompletion.create(
        model=chatgpt_model,
        messages=messages_history,
        temperature=temperature,
        max_tokens=max_tokens
    )
    return response['choices'][0]['message']['content'], messages_history


def init_history(messages_history, system_prompt):
    # Start a fresh history containing only the system prompt and reset the
    # message-history log and system-prompt boxes in the UI.
    messages_history = []
    messages_history += [{"role": "system", "content": system_prompt}]
    msg_log = gr.Textbox.update(value="Message history will appear here")
    system_prompt = gr.Textbox.update(value=system_prompt, label='Insert system message here')
    return messages_history, system_prompt, msg_log


############# INTERFACE ##########################

with gr.Blocks() as demo:
    gr.Markdown("ChatGPT demo: study questions generated from OCR'd learning material")

    ############# TAB 1 ##########################
    with gr.Tab("Upload documents and create context"):
        with gr.Row():
            api_key = gr.Textbox(value='', type='password', label='Insert OPENAI API-key here')
        with gr.Row():
            language_selection = gr.Dropdown(value='English', choices=["English", "Finnish"],
                                             label='Select language')
            files = gr.File(file_count='multiple', file_types=['image'], interactive=True)
            create_context_btn = gr.Button(value='Recognize text and create context')
        with gr.Row():
            gr.Markdown("")
        with gr.Row():
            teksti_contexti = gr.Textbox(value='Context will appear here', label='Created context')

    ############# TAB 3 ##########################
    with gr.Tab("Chat"):
        gr.Markdown("""

ChatGPT ChatBot with Gradio and OpenAI

""")
        with gr.Row():
            system_prompt = gr.Textbox(value=Init_system_prompt, label='Insert system message here')
            chatgpt_model = gr.Dropdown(choices=["gpt-3.5-turbo", "gpt-3.5-turbo-0301", "gpt-3.5-turbo-0613"],
                                        value='gpt-3.5-turbo', label='ChatGPT model to use', interactive=True)
            temperature = gr.Slider(minimum=0.0, maximum=1.0, step=0.05, value=0.7, label='temperature')
            max_tokens = gr.Slider(minimum=10, maximum=600, step=10, value=100, label='Max tokens')
            max_context_size_for_question = gr.Slider(minimum=10, maximum=600, step=10, value=100,
                                                      label='Max context for questions')
        with gr.Row():
            chatbot = gr.Chatbot(label='ChatGPT Chat')
            state = gr.State([])
        with gr.Row():
            msg = gr.Textbox()
        with gr.Row():
            clear = gr.Button("Clear")
        with gr.Row():
            msg_log = gr.Textbox("Message history will appear here", label='Message history')
        with gr.Accordion("Click to open the instructions"):
            gr.Markdown("Instructions go here")

    # TAB 1 (UPLOAD) Interactive elements:
    create_context_btn.click(create_data, [files, language_selection], [teksti_contexti, system_prompt])

    # TAB 3 (CHAT) Interactive elements:
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot,
        [chatbot, state, api_key, system_prompt, teksti_contexti, temperature, max_tokens,
         chatgpt_model, max_context_size_for_question, language_selection],
        [chatbot, state, msg_log]
    )
    clear.click(lambda: None, None, chatbot, queue=False).success(
        init_history, [state, system_prompt], [state, system_prompt, msg_log]
    )

demo.launch(debug=True)