Tuchuanhuhuhu committed
Commit 293b255 · 1 Parent(s): 539d20b

Temporary fix for stuck output

Files changed (1)
  1. modules/overwrites.py +67 -73
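
The diff below swaps the bot-message path in `postprocess_chat_messages` from the project's `convert_bot_before_marked` helper to plain `inspect.cleandoc`. A minimal sketch of that behavioral difference, assuming only the standard library; the sample reply is hypothetical and the old call is left commented out because it needs the project's `modules.utils`:

```python
import inspect

# Hypothetical bot reply, used only for illustration.
reply = """
    Here is a code block:

        print("hello")
    """

# Per the removed lines in the diff, bot messages used to be run through the
# project's Markdown pre-conversion helper before rendering:
#   from modules.utils import convert_bot_before_marked
#   rendered = convert_bot_before_marked(reply)

# Per the added lines, bot messages are now only de-indented with the
# standard library, bypassing the pre-conversion step entirely:
rendered = inspect.cleandoc(reply)
print(rendered)
```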
modules/overwrites.py CHANGED
@@ -1,79 +1,69 @@
 from __future__ import annotations
-import logging

-from typing import List, Tuple
-from gradio_client import utils as client_utils
-from gradio import utils
 import inspect

-from modules.presets import *
-from modules.index_func import *
+import gradio as gr
+from gradio.components.chatbot import ChatbotData, FileMessage
+from gradio.data_classes import FileData
+from gradio_client import utils as client_utils
+
+from modules.utils import convert_bot_before_marked, convert_user_before_marked


 def postprocess(
-    self,
-    y: List[List[str | Tuple[str] | Tuple[str, str] | None] | Tuple],
-) -> List[List[str | Dict | None]]:
-    """
-    Parameters:
-        y: List of lists representing the message and response pairs. Each message and response should be a string, which may be in Markdown format. It can also be a tuple whose first element is a string filepath or URL to an image/video/audio, and second (optional) element is the alt text, in which case the media file is displayed. It can also be None, in which case that message is not displayed.
-    Returns:
-        List of lists representing the message and response. Each message and response will be a string of HTML, or a dictionary with media information. Or None if the message is not to be displayed.
-    """
-    if y is None:
-        return []
-    processed_messages = []
-    for message_pair in y:
-        assert isinstance(
-            message_pair, (tuple, list)
-        ), f"Expected a list of lists or list of tuples. Received: {message_pair}"
-        assert (
-            len(message_pair) == 2
-        ), f"Expected a list of lists of length 2 or list of tuples of length 2. Received: {message_pair}"
-
-        processed_messages.append(
-            [
-                self._postprocess_chat_messages(message_pair[0], "user"),
-                self._postprocess_chat_messages(message_pair[1], "bot"),
-            ]
-        )
-    return processed_messages
+    self,
+    value: list[list[str | tuple[str] | tuple[str, str] | None] | tuple] | None,
+) -> ChatbotData:
+    """
+    Parameters:
+        value: expects a `list[list[str | None | tuple]]`, i.e. a list of lists. The inner list should have 2 elements: the user message and the response message. The individual messages can be (1) strings in valid Markdown, (2) tuples if sending files: (a filepath or URL to a file, [optional string alt text]) -- if the file is image/video/audio, it is displayed in the Chatbot, or (3) None, in which case the message is not displayed.
+    Returns:
+        an object of type ChatbotData
+    """
+    if value is None:
+        return ChatbotData(root=[])
+    processed_messages = []
+    for message_pair in value:
+        if not isinstance(message_pair, (tuple, list)):
+            raise TypeError(
+                f"Expected a list of lists or list of tuples. Received: {message_pair}"
+            )
+        if len(message_pair) != 2:
+            raise TypeError(
+                f"Expected a list of lists of length 2 or list of tuples of length 2. Received: {message_pair}"
+            )
+        processed_messages.append(
+            [
+                self._postprocess_chat_messages(message_pair[0], "user"),
+                self._postprocess_chat_messages(message_pair[1], "bot"),
+            ]
+        )
+    return ChatbotData(root=processed_messages)
+

 def postprocess_chat_messages(
-    self, chat_message: str | tuple | list | None, role: str
-) -> str | dict | None:
-    if chat_message is None:
-        return None
-    else:
-        if isinstance(chat_message, (tuple, list)):
-            if len(chat_message) > 0 and "text" in chat_message[0]:
-                chat_message = chat_message[0]["text"]
-            else:
-                file_uri = chat_message[0]
-                if utils.validate_url(file_uri):
-                    filepath = file_uri
-                else:
-                    filepath = self.make_temp_copy_if_needed(file_uri)
-
-                mime_type = client_utils.get_mimetype(filepath)
-                return {
-                    "name": filepath,
-                    "mime_type": mime_type,
-                    "alt_text": chat_message[1] if len(chat_message) > 1 else None,
-                    "data": None,  # These last two fields are filled in by the frontend
-                    "is_file": True,
-                }
-        if isinstance(chat_message, str):
-            # chat_message = inspect.cleandoc(chat_message)
-            # escape html spaces
-            # chat_message = chat_message.replace(" ", "&nbsp;")
-            if role == "bot":
-                chat_message = convert_bot_before_marked(chat_message)
-            elif role == "user":
-                chat_message = convert_user_before_marked(chat_message)
-            return chat_message
-        else:
-            raise ValueError(f"Invalid message for Chatbot component: {chat_message}")
+    self, chat_message: str | tuple | list | None, role: str
+) -> str | FileMessage | None:
+    if chat_message is None:
+        return None
+    elif isinstance(chat_message, (tuple, list)):
+        filepath = str(chat_message[0])
+
+        mime_type = client_utils.get_mimetype(filepath)
+        return FileMessage(
+            file=FileData(path=filepath, mime_type=mime_type),
+            alt_text=chat_message[1] if len(chat_message) > 1 else None,
+        )
+    elif isinstance(chat_message, str):
+        # chat_message = inspect.cleandoc(chat_message)
+        if role == "bot":
+            chat_message = inspect.cleandoc(chat_message)
+            # chat_message = convert_bot_before_marked(chat_message)
+        elif role == "user":
+            chat_message = convert_user_before_marked(chat_message)
+        return chat_message
+    else:
+        raise ValueError(f"Invalid message for Chatbot component: {chat_message}")


 def init_with_class_name_as_elem_classes(original_func):
@@ -86,18 +76,22 @@ def init_with_class_name_as_elem_classes(original_func):
             kwargs["elem_classes"].append("gradio-" + self.__class__.__name__.lower())

         if kwargs.get("multiselect", False):
-            kwargs["elem_classes"].append('multiselect')
+            kwargs["elem_classes"].append("multiselect")

         res = original_func(self, *args, **kwargs)
         return res
+
     return wrapper

+
 def patch_gradio():
-    original_Component_init = gr.components.Component.__init__
-    gr.components.Component.__init__ = init_with_class_name_as_elem_classes(original_Component_init)
-    gr.components.FormComponent.__init__ = init_with_class_name_as_elem_classes(original_Component_init)
+    gr.components.Component.__init__ = init_with_class_name_as_elem_classes(
+        gr.components.Component.__init__
+    )

-    gr.blocks.BlockContext.__init__ = init_with_class_name_as_elem_classes(gr.blocks.BlockContext.__init__)
+    gr.blocks.BlockContext.__init__ = init_with_class_name_as_elem_classes(
+        gr.blocks.BlockContext.__init__
+    )

-    # gr.Chatbot._postprocess_chat_messages = postprocess_chat_messages
-    # gr.Chatbot.postprocess = postprocess
+    gr.Chatbot._postprocess_chat_messages = postprocess_chat_messages
+    gr.Chatbot.postprocess = postprocess
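
For context, a minimal sketch of how `patch_gradio()` might be applied when building the UI. The call site is an assumption, not shown in this commit; it would run before the Blocks UI is constructed so the wrapped `__init__` can tag each component with its class-based elem_class, while the Chatbot method overrides take effect whenever messages are rendered:

```python
# Hypothetical call site, for illustration only -- this commit does not show
# where patch_gradio() is invoked.
import gradio as gr

from modules.overwrites import patch_gradio

patch_gradio()  # wrap Component/BlockContext __init__, override Chatbot postprocessing

with gr.Blocks() as demo:
    # Components created from here on get an extra elem_class such as
    # "gradio-chatbot", and the Chatbot renders history through the patched
    # postprocess() that returns ChatbotData.
    chatbot = gr.Chatbot()

# demo.launch()
```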