dome272 committed on
Commit
7e11c2e
1 Parent(s): f2d587a

disable torch.compile

Files changed (1)
app.py +2 -1
app.py CHANGED
@@ -9,6 +9,7 @@ from diffusers.utils import numpy_to_pil
 from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline
 from diffusers.pipelines.wuerstchen import WuerstchenPrior, default_stage_c_timesteps
 from previewer.modules import Previewer
+os.environ['TOKENIZERS_PARALLELISM'] = 'false'
 
 DESCRIPTION = "# Würstchen"
 if not torch.cuda.is_available():
@@ -17,7 +18,7 @@ if not torch.cuda.is_available():
 MAX_SEED = np.iinfo(np.int32).max
 CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
 MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1536"))
-USE_TORCH_COMPILE = True
+USE_TORCH_COMPILE = False
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
 PREVIEW_IMAGES = True
 
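For context: the diff only shows the flag being flipped, not where USE_TORCH_COMPILE is consumed later in app.py. The sketch below is a minimal, assumed illustration of how such a flag is commonly wired up around the pipelines imported above; the model repo IDs ("warp-ai/wuerstchen-prior", "warp-ai/wuerstchen") and the torch.compile arguments are assumptions, not taken from this commit.

import os
import torch
from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline

# Added in this commit: silence the fork warning emitted by the HF tokenizers library.
os.environ["TOKENIZERS_PARALLELISM"] = "false"

USE_TORCH_COMPILE = False  # flipped to False by this commit

device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32

# Assumed checkpoint IDs; the Space's actual checkpoints may differ.
prior_pipeline = WuerstchenPriorPipeline.from_pretrained(
    "warp-ai/wuerstchen-prior", torch_dtype=dtype
).to(device)
decoder_pipeline = WuerstchenDecoderPipeline.from_pretrained(
    "warp-ai/wuerstchen", torch_dtype=dtype
).to(device)

if USE_TORCH_COMPILE:
    # Optional speed-up: compile the prior's denoising model.
    # Disabled by this commit, e.g. to avoid long compile times or
    # instability in the hosted environment.
    prior_pipeline.prior = torch.compile(
        prior_pipeline.prior, mode="reduce-overhead", fullgraph=True
    )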