# Minimal Gradio playground that exercises lmdeploy's gradio serving helpers
# with a stubbed chat function.
import threading
from typing import Sequence

import gradio as gr
# CSS, THEME, disable_btn, enable_btn, cancel_local_func and reset_local_func
# come from the star import below.
from lmdeploy.serve.gradio.app import *

async def chat_stream_test(
    instruction: str,
    state_chatbot: Sequence,
    cancel_btn: gr.Button,
    reset_btn: gr.Button,
    request: gr.Request,
):
    """Chat with AI assistant.

    Args:
        instruction (str): user's prompt
        state_chatbot (Sequence): the chatting history
        request (gr.Request): the request from a user
    """
    # Disable both buttons and clear the status text while the request starts.
    yield (state_chatbot, state_chatbot, disable_btn, disable_btn, '')
    # Derive a session id from the client ip, falling back to the thread ident.
    session_id = threading.current_thread().ident
    if request is not None:
        session_id = int(request.kwargs['client']['host'].replace('.', ''))
        print(request.kwargs['client'])
    else:
        print('Warning: could not get the request ip')
    print(f'session_id {session_id}')
    bot_summarized_response = ''
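    # Append the new prompt to the history; the bot reply stays empty in this stub.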
    state_chatbot = state_chatbot + [(instruction, None)]

    # Simulated in-progress update: cancel stays disabled, reset is re-enabled.
    yield (state_chatbot, state_chatbot, disable_btn, enable_btn,
           f'{bot_summarized_response}'.strip())

    # Final update once the (stubbed) response is complete.
    yield (state_chatbot, state_chatbot, disable_btn, enable_btn,
           f'{bot_summarized_response}'.strip())


with gr.Blocks(css=CSS, theme=THEME) as demo:
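    # Minimal playground UI: chat history, prompt box and cancel/reset buttons.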
    state_chatbot = gr.State([])

    with gr.Column(elem_id='container'):
        gr.Markdown('## LMDeploy Playground')

        chatbot = gr.Chatbot(
            elem_id='chatbot',
            label='test')
        instruction_txtbox = gr.Textbox(
            placeholder='Please input the instruction',
            label='Instruction')
        with gr.Row():
            cancel_btn = gr.Button(value='Cancel', interactive=False)
            reset_btn = gr.Button(value='Reset')

    # Run the stubbed chat function when a prompt is submitted.
    send_event = instruction_txtbox.submit(
        chat_stream_test,
        [instruction_txtbox, state_chatbot, cancel_btn, reset_btn],
        [state_chatbot, chatbot, cancel_btn, reset_btn])
    # Clear the instruction textbox once the prompt has been submitted.
    instruction_txtbox.submit(
        lambda: gr.Textbox.update(value=''),
        [],
        [instruction_txtbox],
    )
    # Cancel interrupts generation by cancelling the in-flight send event.
    cancel_btn.click(cancel_local_func,
                     [state_chatbot, cancel_btn, reset_btn],
                     [state_chatbot, cancel_btn, reset_btn],
                     cancels=[send_event])

    # Reset clears the chat history and the textbox, cancelling any running event.
    reset_btn.click(reset_local_func, [instruction_txtbox, state_chatbot],
                    [state_chatbot, chatbot, instruction_txtbox],
                    cancels=[send_event])

# print(f'server is gonna mount on: http://{server_name}:{server_port}')
demo.queue(concurrency_count=4, max_size=100).launch()