The NSFW content filter is too sensitive, to the point of being unreasonable: even the example prompt gets flagged as NSFW.

#76
Files changed (2)
  1. app.py +1 -1
  2. requirements.txt +4 -6
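For context on the filter complaint: in diffusers, the NSFW check is the safety_checker component of StableDiffusionPipeline, and the usual way to relax it is to drop that component when the pipeline is loaded. The sketch below only illustrates that approach and is not part of this diff; the model id is a placeholder, not the one this Space uses.

```python
# Illustration only (not part of this diff): load a pipeline without the
# NSFW safety checker. The model id below is a placeholder.
import torch
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",  # placeholder model id
    torch_dtype=torch.float16,
    safety_checker=None,               # removes the NSFW filter entirely
)
if torch.cuda.is_available():
    pipe = pipe.to("cuda")
```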
app.py CHANGED
@@ -239,7 +239,7 @@ def img_to_img(model_path, prompt, neg_prompt, img, strength, guidance, steps, w
 
     if torch.cuda.is_available():
         pipe0 = pipe0.to("cuda")
-        #pipe0.enable_xformers_memory_efficient_attention()
+        pipe0.enable_xformers_memory_efficient_attention()
     last_mode = "img2img"
 
     prompt = current_model.prefix + prompt
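The only app.py change is uncommenting the xformers call. Since that call raises when xformers is missing or the installed wheel does not match the runtime, a guarded variant along the lines below may be more robust; the helper name is made up for this sketch, and the try/except is an addition, not what app.py does.

```python
# Sketch only: enable xformers attention defensively instead of unconditionally.
# `enable_xformers_if_possible` is a hypothetical helper, not code from app.py.
import torch
from diffusers import DiffusionPipeline

def enable_xformers_if_possible(pipe: DiffusionPipeline) -> DiffusionPipeline:
    """Move the pipeline to GPU and switch on xformers attention when it works."""
    if torch.cuda.is_available():
        pipe = pipe.to("cuda")
        try:
            pipe.enable_xformers_memory_efficient_attention()
        except Exception as exc:  # xformers not installed or incompatible build
            print(f"xformers unavailable, keeping default attention: {exc}")
    return pipe
```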
requirements.txt CHANGED
@@ -1,6 +1,6 @@
 --extra-index-url https://download.pytorch.org/whl/cu113
 torch
-torchvision
+torchvision==0.13.1+cu113
 #diffusers
 git+https://github.com/huggingface/diffusers.git
 #transformers
@@ -8,11 +8,9 @@ git+https://github.com/huggingface/transformers
 scipy
 ftfy
 psutil
-accelerate
-#accelerate==0.12.0
+accelerate==0.12.0
 #OmegaConf
 #pytorch_lightning
-triton
+triton==2.0.0.dev20220701
 #https://github.com/apolinario/xformers/releases/download/0.0.3/xformers-0.0.14.dev0-cp38-cp38-linux_x86_64.whl
-#https://github.com/camenduru/stable-diffusion-webui-colab/releases/download/0.0.15/xformers-0.0.15.dev0+4c06c79.d20221205-cp38-cp38-linux_x86_64.whl
-#xformers
+https://github.com/camenduru/stable-diffusion-webui-colab/releases/download/0.0.15/xformers-0.0.15.dev0+4c06c79.d20221205-cp38-cp38-linux_x86_64.whl
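Because the xformers wheel is pinned to a specific build (cp38, cu113), a quick runtime check helps confirm the pinned stack resolved as intended. This is just a sanity-check snippet, not part of the PR; if I remember right, is_xformers_available is exported from diffusers.utils.

```python
# Sanity check after installing requirements.txt: torch should see CUDA and
# diffusers should detect the prebuilt xformers wheel.
import torch
from diffusers.utils import is_xformers_available

print("torch:", torch.__version__, "| CUDA available:", torch.cuda.is_available())
print("xformers importable:", is_xformers_available())
```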