hexgrad committed on
Commit f5b65ab · verified · 1 Parent(s): e1f0af6

Upload app.py

Files changed (1)
  1. app.py +11 -7
app.py CHANGED
@@ -240,7 +240,6 @@ def clamp_speed(speed):
         return 2
     return speed
 
-sk = gr.State()
 # Must be backwards compatible with https://huggingface.co/spaces/Pendrokar/TTS-Spaces-Arena
 def generate(text, voice='af', ps=None, speed=1, trim=3000, use_gpu='auto'):
     voices = resolve_voices(voice, warn=ps)
@@ -255,26 +254,27 @@ def generate(text, voice='af', ps=None, speed=1, trim=3000, use_gpu='auto'):
         tokens = tokens[:510]
     ps = ''.join(next(k for k, v in VOCAB.items() if i == v) for i in tokens)
     use_gpu = len(ps) > 99 if use_gpu == 'auto' else use_gpu
-    global sk
-    print('🔥', datetime.now(), text, voices, ps, use_gpu, sk)
+    use_gpu = text not in sents
     try:
         if use_gpu:
             out = forward_gpu(tokens, voices, speed)
         else:
             out = forward(tokens, voices, speed)
     except gr.exceptions.Error as e:
-        if use_gpu:
+        if False and use_gpu:
             gr.Warning(str(e))
             gr.Info('Switching to CPU')
             out = forward(tokens, voices, speed)
         else:
             raise gr.Error(e)
+        print('🔥', datetime.now(), text, voices, ps, use_gpu, repr(e))
         return (None, '')
     trim = int(trim / speed)
     if trim > 0:
         if trim * 2 >= len(out):
             return (None, '')
         out = out[trim:-trim]
+    print('🔥', datetime.now(), text, voices, ps, use_gpu)
     return ((SAMPLE_RATE, out), ps)
 
 def toggle_autoplay(autoplay):
@@ -425,6 +425,7 @@ def lf_generate(segments, voice, speed=1, trim=0, pad_between=0, use_gpu=True):
     speed = clamp_speed(speed)
     trim = int(trim / speed)
     pad_between = int(pad_between / speed)
+    use_gpu = True
     batch_sizes = [89, 55, 34, 21, 13, 8, 5, 3, 2, 1, 1]
     i = 0
     while i < len(token_lists):
@@ -437,11 +438,14 @@ def lf_generate(segments, voice, speed=1, trim=0, pad_between=0, use_gpu=True):
             else:
                 outs = lf_forward(tokens, voices, speed)
         except gr.exceptions.Error as e:
-            if use_gpu:
+            if False and use_gpu:
                 gr.Warning(str(e))
                 gr.Info('Switching to CPU')
                 outs = lf_forward(tokens, voices, speed)
                 use_gpu = False
+            elif outs:
+                gr.Warning(repr(e))
+                i = len(token_lists)
             else:
                 raise gr.Error(e)
         for out in outs:
@@ -558,8 +562,8 @@ Random Japanese texts: CC0 public domain from [Common Voice](https://github.com/
 with gr.Blocks() as changelog:
     gr.Markdown('''
 **28 Nov 2024**<br/>
-🥈 CPU fallback<br/>
-🌊 Long Form streaming and stop button
+🌊 Long Form streaming and stop button<br/>
+⚠️ CPU suspended, possible abuse
 
 **25 Nov 2024**<br/>
 🎨 Voice Mixer added
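
Similarly, in `lf_generate` the new `elif outs:` branch covers a failure that happens after earlier batches have already streamed: the error is reported with `gr.Warning(repr(e))` and `i = len(token_lists)` ends the batching loop instead of raising. A rough sketch of that stop-on-later-failure pattern, with an assumed `run_batch` helper and toy data in place of the real token batches:

# Sketch of a streaming loop that keeps already-yielded output and stops
# cleanly when a later batch fails, mirroring the new `elif outs:` branch.
def stream_batches(token_lists, run_batch):
    outs = []
    i = 0
    while i < len(token_lists):
        try:
            outs = run_batch(token_lists[i])
        except Exception as e:  # app.py catches gr.exceptions.Error
            if outs:  # earlier batches already produced audio
                print('warning:', repr(e))  # app.py calls gr.Warning(repr(e))
                break  # app.py sets i = len(token_lists) to end its loop
            raise  # nothing streamed yet, so surface the error
        for out in outs:
            yield out
        i += 1

# Toy example: the second batch fails, so only the first batch's output is kept.
def run_batch(tokens):
    if tokens == 'bad':
        raise RuntimeError('batch failed')
    return [tokens + '-audio']

print(list(stream_batches(['ok', 'bad', 'unreached'], run_batch)))  # ['ok-audio']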