gera committed
Commit 4a99335 · 1 Parent(s): ba4f426

enough for today.

Files changed (1)
  1. app.py +26 -12
app.py CHANGED
@@ -6,6 +6,10 @@ from json import loads as json_loads
 from pathlib import Path
 import fitz
 
+MODEL = 'gpt-4-turbo'
+PRICE_PER_M = 10.00
+LIMIT = 120000 # some space for answer
+
 api_key = os_getenv("OPENAI_APIKEY")
 client = OpenAI(api_key=api_key)
 
@@ -14,12 +18,12 @@ def get_prompt(books, question = None):
         f"Read the following books.\n" +
         f"Each book may have some pages at the beggining with data about the book, an index, or table of content, etc. " +
         f"Pages may have a header and/or a footer. Consider all this maybe present." +
-        f"Please answer all below in the suggested format, in the language of the book:\n"+
+        f"Please answer, for each book, all below in the suggested format, in the language of the book:\n"+
         f"**Title**: ...\n"
         f"**Author**: ...\n"
         f"**Chapter Names**: ...\n"
         f"**Characters**: \n"
-        f"**Detailed Summary**: \n"
+        f"**Detailed Summary of the whole book**: \n"
     )
     prompt += f"{books}\n"
 
@@ -45,7 +49,7 @@ def chat(message, history, files):
     history_openai_format.append({"role": "user", "content": message})
 
     response = client.chat.completions.create(
-        model='gpt-4-turbo',
+        model=MODEL,
         messages= history_openai_format,
         temperature=1.0,
         stream=True)
@@ -68,21 +72,31 @@ def get_text(filename):
 
 def files_ready(filenames):
     encoder = encoding = tiktoken.encoding_for_model('gpt-4-turbo')
-    answer = ''
+    books = ''
     for i, name in enumerate(filenames):
-        answer += f"\n## Document #{i+1}\nName: {Path(name).name}\n"
-        answer += get_text(name)
+        books += f"\n## Document #{i+1}\nName: {Path(name).name}\n"
+        books += get_text(name)
+
+    prompt = get_prompt(books)
+    tokens = len(encoder.encode(prompt))
+    cost = tokens * PRICE_PER_M / 1000000 * 2 # * 2 is too much for an answer
 
-    return len(encoder.encode(answer)), [[get_prompt(answer), None]]
+    if tokens > LIMIT:
+        raise gr.Error(f"Book is too long. It's {tokens} tokens long and can't be more than {LIMIT}.")
+    return tokens, f"${cost}", [[prompt, None]]
 
 def files_changed(filenames):
-    if not filenames:
-        return 0
+    if filenames:
+        return "-", "-"
+    else:
+        return 0, "$0"
 
 with gr.Blocks(title="Book summarization and more") as demo:
     with gr.Row():
         files = gr.Files(file_types=["txt","doc","docx","pdf"] )
-        tokens = gr.Text("0", label="Tokens")
+        with gr.Column():
+            tokens = gr.Text("0", label="Tokens")
+            cost = gr.Text("0", label="Cost")
 
     chat = gr.ChatInterface(
         fn=chat,
@@ -91,8 +105,8 @@ with gr.Blocks(title="Book summarization and more") as demo:
         multimodal=False)
 
     other = gr.Button(interactive=False)
-    files.upload(files_ready, [files], [tokens, chat.chatbot_state])
-    files.change(files_changed, files, tokens)
+    files.upload(files_ready, [files], [tokens, cost, chat.chatbot_state])
+    files.change(files_changed, files, [tokens, cost])
 
 
 auth=os_getenv("APP_USERS", "null")
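
For context, the token accounting added in files_ready reduces to the short standalone sketch below. It is an illustration only, not part of the commit: sample_text stands in for the concatenated book text, a plain ValueError replaces gr.Error so the snippet runs without Gradio, and the $10-per-million-token price and 120,000-token limit simply mirror the constants hard-coded above.

# Standalone sketch of the token-count / cost-estimate logic in files_ready.
# Requires only the tiktoken package; sample_text is a placeholder for the real prompt.
import tiktoken

MODEL = 'gpt-4-turbo'
PRICE_PER_M = 10.00   # USD per million input tokens, as hard-coded in this commit
LIMIT = 120000        # leave some space for the answer

encoder = tiktoken.encoding_for_model(MODEL)

sample_text = "## Document #1\nName: example.pdf\nOnce upon a time..."  # placeholder
tokens = len(encoder.encode(sample_text))
cost = tokens * PRICE_PER_M / 1000000 * 2   # doubled as a rough budget for the answer

if tokens > LIMIT:
    raise ValueError(f"Too long: {tokens} tokens, limit is {LIMIT}.")
print(tokens, f"${cost}")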